skilld 0.1.2 → 0.2.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (40) hide show
  1. package/README.md +70 -120
  2. package/dist/_chunks/config.mjs +8 -2
  3. package/dist/_chunks/config.mjs.map +1 -1
  4. package/dist/_chunks/llm.mjs +710 -204
  5. package/dist/_chunks/llm.mjs.map +1 -1
  6. package/dist/_chunks/pool.mjs +115 -0
  7. package/dist/_chunks/pool.mjs.map +1 -0
  8. package/dist/_chunks/releases.mjs +689 -179
  9. package/dist/_chunks/releases.mjs.map +1 -1
  10. package/dist/_chunks/storage.mjs +311 -19
  11. package/dist/_chunks/storage.mjs.map +1 -1
  12. package/dist/_chunks/sync-parallel.mjs +134 -378
  13. package/dist/_chunks/sync-parallel.mjs.map +1 -1
  14. package/dist/_chunks/types.d.mts +9 -6
  15. package/dist/_chunks/types.d.mts.map +1 -1
  16. package/dist/_chunks/utils.d.mts +137 -68
  17. package/dist/_chunks/utils.d.mts.map +1 -1
  18. package/dist/_chunks/version.d.mts +43 -6
  19. package/dist/_chunks/version.d.mts.map +1 -1
  20. package/dist/agent/index.d.mts +58 -15
  21. package/dist/agent/index.d.mts.map +1 -1
  22. package/dist/agent/index.mjs +4 -2
  23. package/dist/cache/index.d.mts +2 -2
  24. package/dist/cache/index.mjs +2 -2
  25. package/dist/cli.mjs +2175 -1435
  26. package/dist/cli.mjs.map +1 -1
  27. package/dist/index.d.mts +4 -3
  28. package/dist/index.mjs +2 -2
  29. package/dist/retriv/index.d.mts +16 -2
  30. package/dist/retriv/index.d.mts.map +1 -1
  31. package/dist/retriv/index.mjs +44 -15
  32. package/dist/retriv/index.mjs.map +1 -1
  33. package/dist/retriv/worker.d.mts +33 -0
  34. package/dist/retriv/worker.d.mts.map +1 -0
  35. package/dist/retriv/worker.mjs +47 -0
  36. package/dist/retriv/worker.mjs.map +1 -0
  37. package/dist/sources/index.d.mts +2 -2
  38. package/dist/sources/index.mjs +2 -2
  39. package/dist/types.d.mts +5 -3
  40. package/package.json +11 -7
@@ -1,20 +1,278 @@
1
1
  import { a as getCacheDir, n as REFERENCES_DIR } from "./config.mjs";
2
- import { basename, join } from "node:path";
2
+ import { basename, join, resolve } from "pathe";
3
3
  import { existsSync, lstatSync, mkdirSync, readFileSync, readdirSync, rmSync, statSync, symlinkSync, unlinkSync, writeFileSync } from "node:fs";
4
// Invisible / bidi-control characters commonly used to smuggle hidden text
// past a human reviewer (zero-width spaces, joiners, BOM, direction marks).
const ZERO_WIDTH_RE = /[\u200B\u200C\uFEFF\u2060\u200D\u061C\u180E\u200E\u200F\u2028\u2029]/gu;

// HTML comments can carry instructions invisible in rendered markdown.
const HTML_COMMENT_RE = /<!--[\s\S]*?-->/g;

// Tag names that mimic LLM conversation roles or prompt directives;
// stripped everywhere (prompt-injection surface).
const AGENT_DIRECTIVE_TAGS = [
  "system",
  "instructions",
  "override",
  "prompt",
  "context",
  "role",
  "user-prompt",
  "assistant",
  "tool-use",
  "tool-result",
  "system-prompt",
  "human",
  "admin"
];

// Active-content HTML tags that have no legitimate place in cached docs.
const DANGEROUS_HTML_TAGS = [
  "script",
  "iframe",
  "style",
  "meta",
  "object",
  "embed",
  "form"
];
30
/**
 * Decode HTML entities that encode angle brackets.
 *
 * Handles named (&lt; / &gt;), decimal (&#60; / &#062;, optional leading
 * zeros) and hexadecimal (&#x3c; / &#x3E;) forms so that entity-encoded
 * tags become visible to the subsequent tag-stripping passes.
 */
function decodeAngleBracketEntities(text) {
  let decoded = text;
  const substitutions = [
    [/&lt;/gi, "<"],
    [/&gt;/gi, ">"],
    [/&#0*60;/g, "<"],
    [/&#0*62;/g, ">"],
    [/&#x0*3c;/gi, "<"],
    [/&#x0*3e;/gi, ">"]
  ];
  for (const [pattern, replacement] of substitutions) {
    decoded = decoded.replace(pattern, replacement);
  }
  return decoded;
}
33
/**
 * Remove the given tags from `text`.
 *
 * Paired tags are removed together with their inner content; leftover
 * standalone opening/closing/self-closing tags are then removed on their
 * own. Matching is case-insensitive and tolerates attributes.
 */
function stripTags(text, tags) {
  if (tags.length === 0) return text;
  const group = tags.join("|");
  const paired = new RegExp(`<(${group})(\\s[^>]*)?>([\\s\\S]*?)<\\/\\1>`, "gi");
  const standalone = new RegExp(`<\\/?(${group})(\\s[^>]*)?\\/?>`, "gi");
  return text.replace(paired, "").replace(standalone, "");
}
42
+ const EXTERNAL_IMAGE_RE = /!\[([^\]]*)\]\(https?:\/\/[^)]+\)/gi;
43
+ const EXTERNAL_LINK_RE = /\[([^\]]*)\]\((https?:\/\/[^)]+)\)/gi;
44
+ const DANGEROUS_PROTOCOL_RE = /!?\[([^\]]*)\]\(\s*(javascript|data|vbscript|file)\s*:[^)]*\)/gi;
45
+ const DANGEROUS_PROTOCOL_ENCODED_RE = /!?\[([^\]]*)\]\(\s*(?:(?:j|%6a|%4a)(?:a|%61|%41)(?:v|%76|%56)(?:a|%61|%41)(?:s|%73|%53)(?:c|%63|%43)(?:r|%72|%52)(?:i|%69|%49)(?:p|%70|%50)(?:t|%74|%54)|(?:d|%64|%44)(?:a|%61|%41)(?:t|%74|%54)(?:a|%61|%41)|(?:v|%76|%56)(?:b|%62|%42)(?:s|%73|%53)(?:c|%63|%43)(?:r|%72|%52)(?:i|%69|%49)(?:p|%70|%50)(?:t|%74|%54))\s*:[^)]*\)/gi;
46
+ const DIRECTIVE_LINE_RE = /^[ \t]*(SYSTEM|OVERRIDE|INSTRUCTION|NOTE TO AI|IGNORE PREVIOUS|IGNORE ALL PREVIOUS|DISREGARD|FORGET ALL|NEW INSTRUCTIONS?|IMPORTANT SYSTEM|ADMIN OVERRIDE)\s*[:>].*/gim;
47
+ const BASE64_BLOB_RE = /^[A-Z0-9+/=]{100,}$/gim;
48
+ const UNICODE_ESCAPE_SPAM_RE = /(\\u[\dA-Fa-f]{4}){4,}/g;
49
/**
 * Apply `fn` to every run of lines that is NOT inside a fenced code block,
 * leaving properly closed fences untouched.
 *
 * A fence opens on a (possibly indented) line starting with ``` or ~~~
 * (3+ chars) and closes on a line with a run of the same character, at
 * least as long, and nothing else. A fence still open at EOF is passed
 * through `fn` as well — unterminated "code" is treated as suspect prose.
 */
function processOutsideCodeBlocks(content, fn) {
  const OPEN_RE = /^(`{3,}|~{3,})/;
  const CLOSE_RE = /^(`{3,}|~{3,})\s*$/;
  const out = [];
  let prose = [];
  let code = [];
  let insideFence = false;
  let fenceTick = "";
  let fenceSize = 0;
  const emitProse = () => {
    if (prose.length > 0) {
      out.push(fn(prose.join("\n")));
      prose = [];
    }
  };
  for (const rawLine of content.split("\n")) {
    const lead = rawLine.trimStart();
    if (insideFence) {
      const closer = lead.match(CLOSE_RE);
      if (closer && closer[1][0] === fenceTick && closer[1].length >= fenceSize) {
        // Emit buffered code verbatim, then the closing fence line.
        out.push(code.join("\n"), rawLine);
        code = [];
        insideFence = false;
        fenceTick = "";
        fenceSize = 0;
      } else {
        code.push(rawLine);
      }
      continue;
    }
    const opener = lead.match(OPEN_RE);
    if (opener) {
      emitProse();
      insideFence = true;
      fenceTick = opener[1][0];
      fenceSize = opener[1].length;
      code = [rawLine];
    } else {
      prose.push(rawLine);
    }
  }
  emitProse();
  if (insideFence && code.length > 0) out.push(fn(code.join("\n")));
  return out.join("\n");
}
94
/**
 * Defensively sanitize fetched markdown before it is cached.
 *
 * Globally: removes zero-width/bidi characters, HTML comments, and
 * agent-directive tags. Outside fenced code blocks only: decodes
 * angle-bracket entities, strips directive + dangerous HTML tags,
 * removes external images, unwraps external links to their label,
 * and scrubs dangerous protocols, injected directive lines, base64
 * blobs and unicode-escape spam. Falsy input is returned unchanged.
 */
function sanitizeMarkdown(content) {
  if (!content) return content;
  const globallyCleaned = stripTags(
    content.replace(ZERO_WIDTH_RE, "").replace(HTML_COMMENT_RE, ""),
    AGENT_DIRECTIVE_TAGS
  );
  return processOutsideCodeBlocks(globallyCleaned, (segment) =>
    stripTags(
      decodeAngleBracketEntities(segment),
      [...AGENT_DIRECTIVE_TAGS, ...DANGEROUS_HTML_TAGS]
    )
      .replace(EXTERNAL_IMAGE_RE, "")
      .replace(EXTERNAL_LINK_RE, "$1")
      .replace(DANGEROUS_PROTOCOL_RE, "")
      .replace(DANGEROUS_PROTOCOL_ENCODED_RE, "")
      .replace(DIRECTIVE_LINE_RE, "")
      .replace(BASE64_BLOB_RE, "")
      .replace(UNICODE_ESCAPE_SPAM_RE, "")
  );
}
113
// "#Heading" missing the space after the hashes ($1 = hashes, $2 = first char).
const HEADING_NO_SPACE_RE = /^(#{1,6})([^\s#])/gm;
// Four or more consecutive newlines — collapsed to at most three.
const EXCESSIVE_BLANKS_RE = /\n{4,}/g;
// Trailing spaces/tabs at end of line.
const TRAILING_WHITESPACE_RE = /[ \t]+$/gm;
// Line that begins with an emoji — heuristic for prose leaking into a fence.
const EMOJI_LINE_START_RE = /^\p{Extended_Pictographic}/u;
117
/**
 * Ensure every opened fenced code block is eventually closed.
 *
 * While a fence is open:
 * - a line that looks like a new opener of the same char and exact same
 *   length gets a synthetic closing fence inserted before it;
 * - a line starting with an emoji is assumed to be prose that leaked in,
 *   so the fence is closed before it;
 * - a fence still open at EOF is closed at the end of the document
 *   (preceded by a blank line when needed).
 */
function closeUnclosedCodeBlocks(content) {
  const out = [];
  let isOpen = false;
  let openFence = "";
  for (const line of content.split("\n")) {
    const lead = line.trimStart();
    if (!isOpen) {
      const opener = lead.match(/^(`{3,}|~{3,})/);
      if (opener) {
        isOpen = true;
        openFence = opener[1][0].repeat(opener[1].length);
      }
    } else {
      const closer = lead.match(/^(`{3,}|~{3,})\s*$/);
      if (closer && closer[1][0] === openFence[0] && closer[1].length >= openFence.length) {
        isOpen = false;
        openFence = "";
      } else {
        const reopener = lead.match(/^(`{3,}|~{3,})\S/);
        if (reopener && reopener[1][0] === openFence[0] && reopener[1].length === openFence.length) {
          out.push(openFence);
        } else if (EMOJI_LINE_START_RE.test(lead)) {
          out.push(openFence);
          isOpen = false;
          openFence = "";
        }
      }
    }
    out.push(line);
  }
  if (isOpen) {
    if (out.length > 0 && out[out.length - 1] !== "") out.push("");
    out.push(openFence);
  }
  return out.join("\n");
}
153
/**
 * Drop degenerate fenced code blocks: blocks with empty content, and
 * blocks whose trimmed content exactly repeats the immediately preceding
 * code block (only blank lines in between — any non-blank prose line
 * resets the duplicate chain). Unclosed fences are left untouched.
 */
function cleanupCodeBlocks(content) {
  const lines = content.split("\n");
  const drop = /* @__PURE__ */ new Set();
  let lastBlock;
  let idx = 0;
  while (idx < lines.length) {
    const lead = lines[idx].trimStart();
    const opener = lead.match(/^(`{3,}|~{3,})/);
    if (!opener) {
      // A non-blank non-fence line breaks the consecutive-duplicate chain.
      if (lead) lastBlock = void 0;
      idx += 1;
      continue;
    }
    const fenceTick = opener[1][0];
    const fenceSize = opener[1].length;
    const start = idx;
    idx += 1;
    let end = -1;
    while (idx < lines.length) {
      const closer = lines[idx].trimStart().match(/^(`{3,}|~{3,})\s*$/);
      idx += 1;
      if (closer && closer[1][0] === fenceTick && closer[1].length >= fenceSize) {
        end = idx - 1;
        break;
      }
    }
    if (end === -1) continue;
    const body = lines.slice(start + 1, end).join("\n").trim();
    if (!body || body === lastBlock) {
      for (let k = start; k <= end; k++) drop.add(k);
    } else {
      lastBlock = body;
    }
  }
  if (drop.size === 0) return content;
  return lines.filter((_, k) => !drop.has(k)).join("\n");
}
189
/**
 * Close unterminated inline code spans (backtick runs) line by line.
 *
 * Lines inside fenced code blocks are skipped. On every other line, each
 * opening run of N backticks must be matched later on the same line by a
 * run of exactly N backticks; when none is found, N backticks are
 * appended to the line and scanning stops for that line.
 */
function closeUnclosedInlineCode(content) {
  const OPEN_RE = /^(`{3,}|~{3,})/;
  const CLOSE_RE = /^(`{3,}|~{3,})\s*$/;
  let withinFence = false;
  let fenceTick = "";
  let fenceSize = 0;
  const fixLine = (line) => {
    let pos = 0;
    while (pos < line.length) {
      if (line[pos] !== "`") {
        pos += 1;
        continue;
      }
      // Measure the opening backtick run.
      const runStart = pos;
      while (pos < line.length && line[pos] === "`") pos += 1;
      const runLen = pos - runStart;
      // Hunt for a later run of exactly the same length.
      let scan = pos;
      let closed = false;
      while (scan < line.length) {
        if (line[scan] !== "`") {
          scan += 1;
          continue;
        }
        const candStart = scan;
        while (scan < line.length && line[scan] === "`") scan += 1;
        if (scan - candStart === runLen) {
          closed = true;
          pos = scan;
          break;
        }
      }
      if (!closed) {
        line = `${line}${"`".repeat(runLen)}`;
        pos = line.length;
      }
    }
    return line;
  };
  return content.split("\n").map((line) => {
    const lead = line.trimStart();
    if (withinFence) {
      const closer = lead.match(CLOSE_RE);
      if (closer && closer[1][0] === fenceTick && closer[1].length >= fenceSize) withinFence = false;
      return line;
    }
    const opener = lead.match(OPEN_RE);
    if (opener) {
      withinFence = true;
      fenceTick = opener[1][0];
      fenceSize = opener[1].length;
      return line;
    }
    return fixLine(line);
  }).join("\n");
}
233
/**
 * Best-effort structural repair of markdown: close dangling code fences,
 * drop empty/duplicated code blocks, close unterminated inline code,
 * fix "#Heading" spacing outside code blocks, then collapse runs of 4+
 * blank lines and strip trailing whitespace. Falsy input is returned
 * unchanged.
 */
function repairMarkdown(content) {
  if (!content) return content;
  const structurallyFixed = closeUnclosedInlineCode(
    cleanupCodeBlocks(closeUnclosedCodeBlocks(content))
  );
  return processOutsideCodeBlocks(
    structurallyFixed,
    (text) => text.replace(HEADING_NO_SPACE_RE, "$1 $2")
  )
    .replace(EXCESSIVE_BLANKS_RE, "\n\n\n")
    .replace(TRAILING_WHITESPACE_RE, "");
}
244
/**
 * Create a symlink after validating that the target resolves inside
 * REFERENCES_DIR, so link targets cannot escape the cache root.
 *
 * An existing symlink or regular file at `linkPath` is removed first
 * (lstat is used so even a dangling link is replaced).
 *
 * @throws {Error} when the resolved target lies outside REFERENCES_DIR.
 */
function safeSymlink(target, linkPath) {
  const resolved = resolve(target);
  // Boundary-safe containment check: a plain startsWith(REFERENCES_DIR)
  // would also accept sibling dirs such as "<refs>-evil"; require an
  // exact match or a path-separator boundary. pathe normalizes to "/".
  const root = REFERENCES_DIR.endsWith("/") ? REFERENCES_DIR.slice(0, -1) : REFERENCES_DIR;
  if (resolved !== root && !resolved.startsWith(`${root}/`)) {
    throw new Error(`Symlink target outside references dir: ${resolved}`);
  }
  try {
    const stat = lstatSync(linkPath);
    if (stat.isSymbolicLink() || stat.isFile()) unlinkSync(linkPath);
  } catch {
    // linkPath does not exist — nothing to remove.
  }
  symlinkSync(target, linkPath, "junction");
}
4
253
/** True when a cache directory already exists for name@version. */
function isCached(name, version) {
  const dir = getCacheDir(name, version);
  return existsSync(dir);
}
7
256
/** Create the references root if missing, owner-only (0o700 === 448). */
function ensureCacheDir() {
  mkdirSync(REFERENCES_DIR, { recursive: true, mode: 0o700 });
}
10
262
/**
 * Write fetched docs into the per-version cache directory.
 *
 * Stale cache dirs for the same package under other version keys are
 * removed first. Each doc's content is passed through sanitizeMarkdown
 * before hitting disk; files are 0o600 inside 0o700 directories.
 *
 * @returns {string} the cache directory that was written.
 * @throws {Error} when a doc.path would escape the cache directory
 *                 (e.g. "../../x") — docs come from external sources.
 */
function writeToCache(name, version, docs) {
  const cacheDir = getCacheDir(name, version);
  mkdirSync(cacheDir, { recursive: true, mode: 448 });
  cleanStaleCacheDirs(name, version);
  const root = resolve(cacheDir);
  for (const doc of docs) {
    // Resolve instead of join so traversal segments can be detected.
    const filePath = resolve(cacheDir, doc.path);
    if (!filePath.startsWith(`${root}/`)) {
      throw new Error(`Doc path escapes cache dir: ${doc.path}`);
    }
    mkdirSync(join(filePath, ".."), { recursive: true, mode: 448 });
    writeFileSync(filePath, sanitizeMarkdown(doc.content), { mode: 384 });
  }
  return cacheDir;
}
@@ -44,17 +302,23 @@ function linkReferences(skillDir, name, version) {
44
302
  const docsLinkPath = join(referencesDir, "docs");
45
303
  const cachedDocsPath = join(cacheDir, "docs");
46
304
  mkdirSync(referencesDir, { recursive: true });
47
- if (existsSync(docsLinkPath)) unlinkSync(docsLinkPath);
48
- if (existsSync(cachedDocsPath)) symlinkSync(cachedDocsPath, docsLinkPath, "junction");
305
+ if (existsSync(cachedDocsPath)) safeSymlink(cachedDocsPath, docsLinkPath);
306
+ }
307
/** Symlink cached GitHub issues into <skillDir>/.skilld/issues, if cached. */
function linkIssues(skillDir, name, version) {
  const referencesDir = join(skillDir, ".skilld");
  mkdirSync(referencesDir, { recursive: true });
  const cachedPath = join(getCacheDir(name, version), "issues");
  if (existsSync(cachedPath)) safeSymlink(cachedPath, join(referencesDir, "issues"));
}
50
/** Symlink cached GitHub discussions into <skillDir>/.skilld/discussions, if cached. */
function linkDiscussions(skillDir, name, version) {
  const referencesDir = join(skillDir, ".skilld");
  mkdirSync(referencesDir, { recursive: true });
  const cachedPath = join(getCacheDir(name, version), "discussions");
  if (existsSync(cachedPath)) safeSymlink(cachedPath, join(referencesDir, "discussions"));
}
59
323
  function resolvePkgDir(name, cwd, version) {
60
324
  const nodeModulesPath = join(cwd, "node_modules", name);
@@ -74,6 +338,16 @@ function linkPkg(skillDir, name, cwd, version) {
74
338
  if (existsSync(pkgLinkPath)) unlinkSync(pkgLinkPath);
75
339
  symlinkSync(pkgPath, pkgLinkPath, "junction");
76
340
  }
341
/**
 * Symlink the resolved package directory as .skilld/pkg-<shortname>
 * (shortname = lowercase final path segment of the package name).
 * No-op when the package cannot be resolved.
 *
 * Uses lstat (not existsSync) to remove a pre-existing link: existsSync
 * follows symlinks, so a dangling link would report false and make
 * symlinkSync fail with EEXIST. This matches safeSymlink's approach.
 */
function linkPkgNamed(skillDir, name, cwd, version) {
  const pkgPath = resolvePkgDir(name, cwd, version);
  if (!pkgPath) return;
  const shortName = name.split("/").pop().toLowerCase();
  const referencesDir = join(skillDir, ".skilld");
  mkdirSync(referencesDir, { recursive: true });
  const linkPath = join(referencesDir, `pkg-${shortName}`);
  try {
    const stat = lstatSync(linkPath);
    if (stat.isSymbolicLink() || stat.isFile()) unlinkSync(linkPath);
  } catch {
    // linkPath does not exist — nothing to remove.
  }
  symlinkSync(pkgPath, linkPath, "junction");
}
77
351
  function getPkgKeyFiles(name, cwd, version) {
78
352
  const pkgPath = resolvePkgDir(name, cwd, version);
79
353
  if (!pkgPath) return [];
@@ -84,11 +358,8 @@ function getPkgKeyFiles(name, cwd, version) {
84
358
  if (pkg.main) files.push(basename(pkg.main));
85
359
  if (pkg.module && pkg.module !== pkg.main) files.push(basename(pkg.module));
86
360
  }
87
- for (const f of [
88
- "README.md",
89
- "CHANGELOG.md",
90
- "changelog.md"
91
- ]) if (existsSync(join(pkgPath, f))) files.push(f);
361
+ const entries = readdirSync(pkgPath).filter((f) => /^readme\.md$/i.test(f) || /^changelog\.md$/i.test(f));
362
+ files.push(...entries);
92
363
  return [...new Set(files)];
93
364
  }
94
365
  function getShippedSkills(name, cwd, version) {
@@ -96,11 +367,32 @@ function getShippedSkills(name, cwd, version) {
96
367
  if (!pkgPath) return [];
97
368
  const skillsPath = join(pkgPath, "skills");
98
369
  if (!existsSync(skillsPath)) return [];
99
- return readdirSync(skillsPath, { withFileTypes: true }).filter((d) => d.isDirectory() && existsSync(join(skillsPath, d.name, "_SKILL.md"))).map((d) => ({
370
+ return readdirSync(skillsPath, { withFileTypes: true }).filter((d) => d.isDirectory() && (existsSync(join(skillsPath, d.name, "SKILL.md")) || existsSync(join(skillsPath, d.name, "_SKILL.md")))).map((d) => ({
100
371
  skillName: d.name,
101
372
  skillDir: join(skillsPath, d.name)
102
373
  }));
103
374
  }
375
/** Symlink cached doc sections into <skillDir>/.skilld/sections, if cached. */
function linkSections(skillDir, name, version) {
  const referencesDir = join(skillDir, ".skilld");
  mkdirSync(referencesDir, { recursive: true });
  const cachedPath = join(getCacheDir(name, version), "sections");
  if (existsSync(cachedPath)) safeSymlink(cachedPath, join(referencesDir, "sections"));
}
383
/**
 * Persist pre-split doc sections ({ file, content } records) under the
 * package's cache dir. Files are 0o600 inside a 0o700 directory.
 */
function writeSections(name, version, sections) {
  const sectionsDir = join(getCacheDir(name, version), "sections");
  mkdirSync(sectionsDir, { recursive: true, mode: 448 });
  for (const section of sections) {
    writeFileSync(join(sectionsDir, section.file), section.content, { mode: 384 });
  }
}
391
/** Read a cached section file as UTF-8, or null when it does not exist. */
function readCachedSection(name, version, file) {
  const sectionPath = join(getCacheDir(name, version), "sections", file);
  return existsSync(sectionPath) ? readFileSync(sectionPath, "utf-8") : null;
}
104
396
  function linkReleases(skillDir, name, version) {
105
397
  const cacheDir = getCacheDir(name, version);
106
398
  const referencesDir = join(skillDir, ".skilld");
@@ -193,6 +485,6 @@ function listReferenceFiles(skillDir, maxDepth = 3) {
193
485
  walk(referencesDir, 0);
194
486
  return files;
195
487
  }
196
- export { writeToCache as _, getShippedSkills as a, linkGithub as c, linkReleases as d, linkShippedSkill as f, resolvePkgDir as g, readCachedDocs as h, getPkgKeyFiles as i, linkPkg as l, listReferenceFiles as m, clearCache as n, hasShippedDocs as o, listCached as p, ensureCacheDir as r, isCached as s, clearAllCache as t, linkReferences as u };
488
+ export { repairMarkdown as C, writeToCache as S, listReferenceFiles as _, getShippedSkills as a, resolvePkgDir as b, linkDiscussions as c, linkPkgNamed as d, linkReferences as f, listCached as g, linkShippedSkill as h, getPkgKeyFiles as i, linkIssues as l, linkSections as m, clearCache as n, hasShippedDocs as o, linkReleases as p, ensureCacheDir as r, isCached as s, clearAllCache as t, linkPkg as u, readCachedDocs as v, sanitizeMarkdown as w, writeSections as x, readCachedSection as y };
197
489
 
198
490
  //# sourceMappingURL=storage.mjs.map
@@ -1 +1 @@
1
- {"version":3,"file":"storage.mjs","names":[],"sources":["../../src/cache/storage.ts"],"sourcesContent":["/**\n * Cache storage operations\n */\n\nimport type { CachedDoc, CachedPackage } from './types'\nimport { existsSync, lstatSync, mkdirSync, readdirSync, readFileSync, rmSync, statSync, symlinkSync, unlinkSync, writeFileSync } from 'node:fs'\nimport { basename, join } from 'node:path'\nimport { REFERENCES_DIR } from './config'\nimport { getCacheDir } from './version'\n\n/**\n * Check if package is cached at given version\n */\nexport function isCached(name: string, version: string): boolean {\n return existsSync(getCacheDir(name, version))\n}\n\n/**\n * Ensure cache directories exist\n */\nexport function ensureCacheDir(): void {\n mkdirSync(REFERENCES_DIR, { recursive: true })\n}\n\n/**\n * Write docs to cache, cleaning stale version dirs for the same package\n */\nexport function writeToCache(\n name: string,\n version: string,\n docs: CachedDoc[],\n): string {\n const cacheDir = getCacheDir(name, version)\n mkdirSync(cacheDir, { recursive: true })\n\n // Clean stale cache dirs for same package with different version keys\n cleanStaleCacheDirs(name, version)\n\n for (const doc of docs) {\n const filePath = join(cacheDir, doc.path)\n mkdirSync(join(filePath, '..'), { recursive: true })\n writeFileSync(filePath, doc.content)\n }\n\n return cacheDir\n}\n\n/**\n * Remove stale cache dirs for same package but different version keys\n * e.g. 
@clack/prompts@1.0 vs @clack/prompts@1.0.0\n */\nfunction cleanStaleCacheDirs(name: string, version: string): void {\n const prefix = `${name}@`\n\n // For scoped packages, check inside the scope dir\n if (name.startsWith('@')) {\n const [scope, pkg] = name.split('/')\n const scopeDir = join(REFERENCES_DIR, scope!)\n if (!existsSync(scopeDir))\n return\n\n const scopePrefix = `${pkg}@`\n const currentDirName = basename(getCacheDir(name, version))\n\n for (const entry of readdirSync(scopeDir)) {\n if (entry.startsWith(scopePrefix) && entry !== currentDirName) {\n rmSync(join(scopeDir, entry), { recursive: true, force: true })\n }\n }\n }\n else {\n if (!existsSync(REFERENCES_DIR))\n return\n for (const entry of readdirSync(REFERENCES_DIR)) {\n if (entry.startsWith(prefix) && entry !== basename(getCacheDir(name, version))) {\n rmSync(join(REFERENCES_DIR, entry), { recursive: true, force: true })\n }\n }\n }\n}\n\n/**\n * Create .skilld directory with symlinked docs (only if external fetch needed)\n *\n * Structure:\n * .claude/skills/<skill>/.skilld/\n * pkg -> node_modules/<pkg> (always, has package.json, README.md, dist/)\n * docs -> ~/.skilld/references/<pkg>@<version>/docs (only if fetched externally)\n *\n * The .skilld/ dirs are gitignored. 
After clone, `skilld install` recreates from lockfile.\n */\nexport function linkReferences(skillDir: string, name: string, version: string): void {\n const cacheDir = getCacheDir(name, version)\n const referencesDir = join(skillDir, '.skilld')\n const docsLinkPath = join(referencesDir, 'docs')\n const cachedDocsPath = join(cacheDir, 'docs')\n\n // Create references dir if needed\n mkdirSync(referencesDir, { recursive: true })\n\n // Symlink docs from cache\n if (existsSync(docsLinkPath)) {\n unlinkSync(docsLinkPath)\n }\n if (existsSync(cachedDocsPath)) {\n symlinkSync(cachedDocsPath, docsLinkPath, 'junction')\n }\n}\n\n/**\n * Create symlink from .skilld dir to cached github data (issues + discussions)\n *\n * Structure:\n * .claude/skills/<skill>/.skilld/github -> ~/.skilld/references/<pkg>@<version>/github\n */\nexport function linkGithub(skillDir: string, name: string, version: string): void {\n const cacheDir = getCacheDir(name, version)\n const referencesDir = join(skillDir, '.skilld')\n const githubLinkPath = join(referencesDir, 'github')\n const cachedGithubPath = join(cacheDir, 'github')\n\n mkdirSync(referencesDir, { recursive: true })\n\n if (existsSync(githubLinkPath)) {\n unlinkSync(githubLinkPath)\n }\n if (existsSync(cachedGithubPath)) {\n symlinkSync(cachedGithubPath, githubLinkPath, 'junction')\n }\n}\n\n/**\n * Resolve the package directory: node_modules first, then cached dist fallback.\n * Returns the path if found, null otherwise.\n */\nexport function resolvePkgDir(name: string, cwd: string, version?: string): string | null {\n const nodeModulesPath = join(cwd, 'node_modules', name)\n if (existsSync(nodeModulesPath))\n return nodeModulesPath\n\n // Fallback: check cached npm dist\n if (version) {\n const cachedPkgDir = join(getCacheDir(name, version), 'pkg')\n if (existsSync(join(cachedPkgDir, 'package.json')))\n return cachedPkgDir\n }\n\n return null\n}\n\n/**\n * Create symlink from .skilld dir to package directory\n *\n * Structure:\n * 
.claude/skills/<skill>/.skilld/pkg -> node_modules/<pkg> OR ~/.skilld/references/<pkg>@<version>/pkg\n *\n * This gives access to package.json, README.md, dist/, and any shipped docs/\n */\nexport function linkPkg(skillDir: string, name: string, cwd: string, version?: string): void {\n const pkgPath = resolvePkgDir(name, cwd, version)\n if (!pkgPath)\n return\n\n const referencesDir = join(skillDir, '.skilld')\n mkdirSync(referencesDir, { recursive: true })\n\n const pkgLinkPath = join(referencesDir, 'pkg')\n if (existsSync(pkgLinkPath)) {\n unlinkSync(pkgLinkPath)\n }\n symlinkSync(pkgPath, pkgLinkPath, 'junction')\n}\n\n/**\n * Get key files from a package directory for display\n * Returns entry points + docs files\n */\nexport function getPkgKeyFiles(name: string, cwd: string, version?: string): string[] {\n const pkgPath = resolvePkgDir(name, cwd, version)\n if (!pkgPath)\n return []\n\n const files: string[] = []\n const pkgJsonPath = join(pkgPath, 'package.json')\n\n if (existsSync(pkgJsonPath)) {\n const pkg = JSON.parse(readFileSync(pkgJsonPath, 'utf-8'))\n\n // Entry points\n if (pkg.main)\n files.push(basename(pkg.main))\n if (pkg.module && pkg.module !== pkg.main)\n files.push(basename(pkg.module))\n }\n\n // Check for common doc files\n for (const f of ['README.md', 'CHANGELOG.md', 'changelog.md']) {\n if (existsSync(join(pkgPath, f)))\n files.push(f)\n }\n\n return [...new Set(files)]\n}\n\n/**\n * Check if package ships its own docs folder\n */\nexport interface ShippedSkill {\n skillName: string\n skillDir: string\n}\n\n/**\n * Check if package ships a skills/ directory with _SKILL.md subdirs\n */\nexport function getShippedSkills(name: string, cwd: string, version?: string): ShippedSkill[] {\n const pkgPath = resolvePkgDir(name, cwd, version)\n if (!pkgPath)\n return []\n\n const skillsPath = join(pkgPath, 'skills')\n if (!existsSync(skillsPath))\n return []\n\n return readdirSync(skillsPath, { withFileTypes: true })\n .filter(d => d.isDirectory() 
&& existsSync(join(skillsPath, d.name, '_SKILL.md')))\n .map(d => ({ skillName: d.name, skillDir: join(skillsPath, d.name) }))\n}\n\n/**\n * Create symlink from .skilld dir to cached releases\n *\n * Structure:\n * .claude/skills/<skill>/.skilld/releases -> ~/.skilld/references/<pkg>@<version>/releases\n */\nexport function linkReleases(skillDir: string, name: string, version: string): void {\n const cacheDir = getCacheDir(name, version)\n const referencesDir = join(skillDir, '.skilld')\n const releasesLinkPath = join(referencesDir, 'releases')\n const cachedReleasesPath = join(cacheDir, 'releases')\n\n mkdirSync(referencesDir, { recursive: true })\n\n if (existsSync(releasesLinkPath)) {\n unlinkSync(releasesLinkPath)\n }\n if (existsSync(cachedReleasesPath)) {\n symlinkSync(cachedReleasesPath, releasesLinkPath, 'junction')\n }\n}\n\n/**\n * Create symlink from skills dir to shipped skill dir\n */\nexport function linkShippedSkill(baseDir: string, skillName: string, targetDir: string): void {\n const linkPath = join(baseDir, skillName)\n if (existsSync(linkPath)) {\n const stat = lstatSync(linkPath)\n if (stat.isSymbolicLink())\n unlinkSync(linkPath)\n else rmSync(linkPath, { recursive: true, force: true })\n }\n symlinkSync(targetDir, linkPath)\n}\n\nexport function hasShippedDocs(name: string, cwd: string, version?: string): boolean {\n const pkgPath = resolvePkgDir(name, cwd, version)\n if (!pkgPath)\n return false\n\n const docsCandidates = ['docs', 'documentation', 'doc']\n for (const candidate of docsCandidates) {\n const docsPath = join(pkgPath, candidate)\n if (existsSync(docsPath))\n return true\n }\n return false\n}\n\n/**\n * List all cached packages\n */\nexport function listCached(): CachedPackage[] {\n if (!existsSync(REFERENCES_DIR))\n return []\n\n return readdirSync(REFERENCES_DIR)\n .filter(name => name.includes('@'))\n .map((dir) => {\n const [name, version] = dir.split('@')\n return { name: name!, version: version!, dir: join(REFERENCES_DIR, 
dir) }\n })\n}\n\n/**\n * Read cached docs for a package\n */\nexport function readCachedDocs(name: string, version: string): CachedDoc[] {\n const cacheDir = getCacheDir(name, version)\n if (!existsSync(cacheDir))\n return []\n\n const docs: CachedDoc[] = []\n\n function walk(dir: string, prefix = '') {\n for (const entry of readdirSync(dir, { withFileTypes: true })) {\n const entryPath = join(dir, entry.name)\n const relativePath = prefix ? `${prefix}/${entry.name}` : entry.name\n\n if (entry.isDirectory()) {\n walk(entryPath, relativePath)\n }\n else if (entry.name.endsWith('.md') || entry.name.endsWith('.mdx')) {\n docs.push({\n path: relativePath,\n content: readFileSync(entryPath, 'utf-8'),\n })\n }\n }\n }\n\n walk(cacheDir)\n return docs\n}\n\n/**\n * Clear cache for a specific package\n */\nexport function clearCache(name: string, version: string): boolean {\n const cacheDir = getCacheDir(name, version)\n if (!existsSync(cacheDir))\n return false\n\n rmSync(cacheDir, { recursive: true })\n return true\n}\n\n/**\n * Clear all cache\n */\nexport function clearAllCache(): number {\n const packages = listCached()\n for (const pkg of packages) {\n clearCache(pkg.name, pkg.version)\n }\n return packages.length\n}\n\n/**\n * List files in .skilld directory (pkg + docs) as relative paths for prompt context\n * Returns paths like ./.skilld/pkg/README.md, ./.skilld/docs/api.md\n */\nexport function listReferenceFiles(skillDir: string, maxDepth = 3): string[] {\n const referencesDir = join(skillDir, '.skilld')\n if (!existsSync(referencesDir))\n return []\n\n const files: string[] = []\n\n function walk(dir: string, depth: number) {\n if (depth > maxDepth)\n return\n try {\n for (const entry of readdirSync(dir, { withFileTypes: true })) {\n const full = join(dir, entry.name)\n if (entry.isDirectory() || entry.isSymbolicLink()) {\n try {\n const stat = statSync(full)\n if (stat.isDirectory()) {\n walk(full, depth + 1)\n continue\n }\n }\n catch { continue }\n }\n if 
(entry.name.endsWith('.md')) {\n files.push(full)\n }\n }\n }\n catch {\n // Broken symlink or permission error\n }\n }\n\n walk(referencesDir, 0)\n return files\n}\n"],"mappings":";;;AAaA,SAAgB,SAAS,MAAc,SAA0B;AAC/D,QAAO,WAAW,YAAY,MAAM,QAAQ,CAAC;;AAM/C,SAAgB,iBAAuB;AACrC,WAAU,gBAAgB,EAAE,WAAW,MAAM,CAAC;;AAMhD,SAAgB,aACd,MACA,SACA,MACQ;CACR,MAAM,WAAW,YAAY,MAAM,QAAQ;AAC3C,WAAU,UAAU,EAAE,WAAW,MAAM,CAAC;AAGxC,qBAAoB,MAAM,QAAQ;AAElC,MAAK,MAAM,OAAO,MAAM;EACtB,MAAM,WAAW,KAAK,UAAU,IAAI,KAAK;AACzC,YAAU,KAAK,UAAU,KAAK,EAAE,EAAE,WAAW,MAAM,CAAC;AACpD,gBAAc,UAAU,IAAI,QAAQ;;AAGtC,QAAO;;AAOT,SAAS,oBAAoB,MAAc,SAAuB;CAChE,MAAM,SAAS,GAAG,KAAK;AAGvB,KAAI,KAAK,WAAW,IAAI,EAAE;EACxB,MAAM,CAAC,OAAO,OAAO,KAAK,MAAM,IAAI;EACpC,MAAM,WAAW,KAAK,gBAAgB,MAAO;AAC7C,MAAI,CAAC,WAAW,SAAS,CACvB;EAEF,MAAM,cAAc,GAAG,IAAI;EAC3B,MAAM,iBAAiB,SAAS,YAAY,MAAM,QAAQ,CAAC;AAE3D,OAAK,MAAM,SAAS,YAAY,SAAS,CACvC,KAAI,MAAM,WAAW,YAAY,IAAI,UAAU,eAC7C,QAAO,KAAK,UAAU,MAAM,EAAE;GAAE,WAAW;GAAM,OAAO;GAAM,CAAC;QAIhE;AACH,MAAI,CAAC,WAAW,eAAe,CAC7B;AACF,OAAK,MAAM,SAAS,YAAY,eAAe,CAC7C,KAAI,MAAM,WAAW,OAAO,IAAI,UAAU,SAAS,YAAY,MAAM,QAAQ,CAAC,CAC5E,QAAO,KAAK,gBAAgB,MAAM,EAAE;GAAE,WAAW;GAAM,OAAO;GAAM,CAAC;;;AAgB7E,SAAgB,eAAe,UAAkB,MAAc,SAAuB;CACpF,MAAM,WAAW,YAAY,MAAM,QAAQ;CAC3C,MAAM,gBAAgB,KAAK,UAAU,UAAU;CAC/C,MAAM,eAAe,KAAK,eAAe,OAAO;CAChD,MAAM,iBAAiB,KAAK,UAAU,OAAO;AAG7C,WAAU,eAAe,EAAE,WAAW,MAAM,CAAC;AAG7C,KAAI,WAAW,aAAa,CAC1B,YAAW,aAAa;AAE1B,KAAI,WAAW,eAAe,CAC5B,aAAY,gBAAgB,cAAc,WAAW;;AAUzD,SAAgB,WAAW,UAAkB,MAAc,SAAuB;CAChF,MAAM,WAAW,YAAY,MAAM,QAAQ;CAC3C,MAAM,gBAAgB,KAAK,UAAU,UAAU;CAC/C,MAAM,iBAAiB,KAAK,eAAe,SAAS;CACpD,MAAM,mBAAmB,KAAK,UAAU,SAAS;AAEjD,WAAU,eAAe,EAAE,WAAW,MAAM,CAAC;AAE7C,KAAI,WAAW,eAAe,CAC5B,YAAW,eAAe;AAE5B,KAAI,WAAW,iBAAiB,CAC9B,aAAY,kBAAkB,gBAAgB,WAAW;;AAQ7D,SAAgB,cAAc,MAAc,KAAa,SAAiC;CACxF,MAAM,kBAAkB,KAAK,KAAK,gBAAgB,KAAK;AACvD,KAAI,WAAW,gBAAgB,CAC7B,QAAO;AAGT,KAAI,SAAS;EACX,MAAM,eAAe,KAAK,YAAY,MAAM,QAAQ,EAAE,MAAM;AAC5D,MAAI,WAAW,KAAK,cAAc,eAAe,CAAC,CAChD,QAAO;;AAGX,QAAO;;AAWT,SAAgB,QAAQ,UAAkB,MAAc,KAAa,SAAwB
;CAC3F,MAAM,UAAU,cAAc,MAAM,KAAK,QAAQ;AACjD,KAAI,CAAC,QACH;CAEF,MAAM,gBAAgB,KAAK,UAAU,UAAU;AAC/C,WAAU,eAAe,EAAE,WAAW,MAAM,CAAC;CAE7C,MAAM,cAAc,KAAK,eAAe,MAAM;AAC9C,KAAI,WAAW,YAAY,CACzB,YAAW,YAAY;AAEzB,aAAY,SAAS,aAAa,WAAW;;AAO/C,SAAgB,eAAe,MAAc,KAAa,SAA4B;CACpF,MAAM,UAAU,cAAc,MAAM,KAAK,QAAQ;AACjD,KAAI,CAAC,QACH,QAAO,EAAE;CAEX,MAAM,QAAkB,EAAE;CAC1B,MAAM,cAAc,KAAK,SAAS,eAAe;AAEjD,KAAI,WAAW,YAAY,EAAE;EAC3B,MAAM,MAAM,KAAK,MAAM,aAAa,aAAa,QAAQ,CAAC;AAG1D,MAAI,IAAI,KACN,OAAM,KAAK,SAAS,IAAI,KAAK,CAAC;AAChC,MAAI,IAAI,UAAU,IAAI,WAAW,IAAI,KACnC,OAAM,KAAK,SAAS,IAAI,OAAO,CAAC;;AAIpC,MAAK,MAAM,KAAK;EAAC;EAAa;EAAgB;EAAe,CAC3D,KAAI,WAAW,KAAK,SAAS,EAAE,CAAC,CAC9B,OAAM,KAAK,EAAE;AAGjB,QAAO,CAAC,GAAG,IAAI,IAAI,MAAM,CAAC;;AAc5B,SAAgB,iBAAiB,MAAc,KAAa,SAAkC;CAC5F,MAAM,UAAU,cAAc,MAAM,KAAK,QAAQ;AACjD,KAAI,CAAC,QACH,QAAO,EAAE;CAEX,MAAM,aAAa,KAAK,SAAS,SAAS;AAC1C,KAAI,CAAC,WAAW,WAAW,CACzB,QAAO,EAAE;AAEX,QAAO,YAAY,YAAY,EAAE,eAAe,MAAM,CAAC,CACpD,QAAO,MAAK,EAAE,aAAa,IAAI,WAAW,KAAK,YAAY,EAAE,MAAM,YAAY,CAAC,CAAC,CACjF,KAAI,OAAM;EAAE,WAAW,EAAE;EAAM,UAAU,KAAK,YAAY,EAAE,KAAA;EAAO,EAAE;;AAS1E,SAAgB,aAAa,UAAkB,MAAc,SAAuB;CAClF,MAAM,WAAW,YAAY,MAAM,QAAQ;CAC3C,MAAM,gBAAgB,KAAK,UAAU,UAAU;CAC/C,MAAM,mBAAmB,KAAK,eAAe,WAAW;CACxD,MAAM,qBAAqB,KAAK,UAAU,WAAW;AAErD,WAAU,eAAe,EAAE,WAAW,MAAM,CAAC;AAE7C,KAAI,WAAW,iBAAiB,CAC9B,YAAW,iBAAiB;AAE9B,KAAI,WAAW,mBAAmB,CAChC,aAAY,oBAAoB,kBAAkB,WAAW;;AAOjE,SAAgB,iBAAiB,SAAiB,WAAmB,WAAyB;CAC5F,MAAM,WAAW,KAAK,SAAS,UAAU;AACzC,KAAI,WAAW,SAAS,CAEtB,KADa,UAAU,SAAS,CACvB,gBAAgB,CACvB,YAAW,SAAS;KACjB,QAAO,UAAU;EAAE,WAAW;EAAM,OAAO;EAAM,CAAC;AAEzD,aAAY,WAAW,SAAS;;AAGlC,SAAgB,eAAe,MAAc,KAAa,SAA2B;CACnF,MAAM,UAAU,cAAc,MAAM,KAAK,QAAQ;AACjD,KAAI,CAAC,QACH,QAAO;AAGT,MAAK,MAAM,aADY;EAAC;EAAQ;EAAiB;EAAM,CAGrD,KAAI,WADa,KAAK,SAAS,UAAU,CACjB,CACtB,QAAO;AAEX,QAAO;;AAMT,SAAgB,aAA8B;AAC5C,KAAI,CAAC,WAAW,eAAe,CAC7B,QAAO,EAAE;AAEX,QAAO,YAAY,eAAe,CAC/B,QAAO,SAAQ,KAAK,SAAS,IAAI,CAAC,CAClC,KAAK,QAAQ;EACZ,MAAM,CAAC,MAAM,WAAW,IAAI,MAAM,IAAI;AACtC,SAAO;GAAQ;GAAgB;GAAU,KAAK,KAAK,gBAAgB,IAAA;GAAM;GACzE;;AAMN
,SAAgB,eAAe,MAAc,SAA8B;CACzE,MAAM,WAAW,YAAY,MAAM,QAAQ;AAC3C,KAAI,CAAC,WAAW,SAAS,CACvB,QAAO,EAAE;CAEX,MAAM,OAAoB,EAAE;CAE5B,SAAS,KAAK,KAAa,SAAS,IAAI;AACtC,OAAK,MAAM,SAAS,YAAY,KAAK,EAAE,eAAe,MAAM,CAAC,EAAE;GAC7D,MAAM,YAAY,KAAK,KAAK,MAAM,KAAK;GACvC,MAAM,eAAe,SAAS,GAAG,OAAO,GAAG,MAAM,SAAS,MAAM;AAEhE,OAAI,MAAM,aAAa,CACrB,MAAK,WAAW,aAAa;YAEtB,MAAM,KAAK,SAAS,MAAM,IAAI,MAAM,KAAK,SAAS,OAAO,CAChE,MAAK,KAAK;IACR,MAAM;IACN,SAAS,aAAa,WAAW,QAAA;IAClC,CAAC;;;AAKR,MAAK,SAAS;AACd,QAAO;;AAMT,SAAgB,WAAW,MAAc,SAA0B;CACjE,MAAM,WAAW,YAAY,MAAM,QAAQ;AAC3C,KAAI,CAAC,WAAW,SAAS,CACvB,QAAO;AAET,QAAO,UAAU,EAAE,WAAW,MAAM,CAAC;AACrC,QAAO;;AAMT,SAAgB,gBAAwB;CACtC,MAAM,WAAW,YAAY;AAC7B,MAAK,MAAM,OAAO,SAChB,YAAW,IAAI,MAAM,IAAI,QAAQ;AAEnC,QAAO,SAAS;;AAOlB,SAAgB,mBAAmB,UAAkB,WAAW,GAAa;CAC3E,MAAM,gBAAgB,KAAK,UAAU,UAAU;AAC/C,KAAI,CAAC,WAAW,cAAc,CAC5B,QAAO,EAAE;CAEX,MAAM,QAAkB,EAAE;CAE1B,SAAS,KAAK,KAAa,OAAe;AACxC,MAAI,QAAQ,SACV;AACF,MAAI;AACF,QAAK,MAAM,SAAS,YAAY,KAAK,EAAE,eAAe,MAAM,CAAC,EAAE;IAC7D,MAAM,OAAO,KAAK,KAAK,MAAM,KAAK;AAClC,QAAI,MAAM,aAAa,IAAI,MAAM,gBAAgB,CAC/C,KAAI;AAEF,SADa,SAAS,KAAK,CAClB,aAAa,EAAE;AACtB,WAAK,MAAM,QAAQ,EAAE;AACrB;;YAGE;AAAE;;AAEV,QAAI,MAAM,KAAK,SAAS,MAAM,CAC5B,OAAM,KAAK,KAAK;;UAIhB;;AAKR,MAAK,eAAe,EAAE;AACtB,QAAO"}
1
+ {"version":3,"file":"storage.mjs","names":[],"sources":["../../src/core/sanitize.ts","../../src/cache/storage.ts"],"sourcesContent":["/**\n * Markdown sanitizer for prompt injection defense.\n *\n * Strips injection vectors from untrusted markdown before it reaches\n * agent-readable files (cached references, SKILL.md, search output).\n *\n * Threat model: agent instruction injection, not browser XSS.\n * Lightweight regex-based — markdown is consumed as text by AI agents.\n */\n\n/** Zero-width and invisible formatting characters used to hide text from human review */\n// eslint-disable-next-line no-misleading-character-class -- intentionally matching individual invisible chars\nconst ZERO_WIDTH_RE = /[\\u200B\\u200C\\uFEFF\\u2060\\u200D\\u061C\\u180E\\u200E\\u200F\\u2028\\u2029]/gu\n\n/** HTML comments (single-line and multi-line) */\nconst HTML_COMMENT_RE = /<!--[\\s\\S]*?-->/g\n\n/**\n * Agent directive tags — stripped globally (including inside code blocks).\n * These are never legitimate in any context; they're purely injection vectors.\n */\nconst AGENT_DIRECTIVE_TAGS = [\n 'system',\n 'instructions',\n 'override',\n 'prompt',\n 'context',\n 'role',\n 'user-prompt',\n 'assistant',\n 'tool-use',\n 'tool-result',\n 'system-prompt',\n 'human',\n 'admin',\n]\n\n/**\n * Dangerous HTML tags — stripped only outside fenced code blocks.\n * May appear legitimately in code examples (e.g. 
`<script setup>` in Vue docs).\n */\nconst DANGEROUS_HTML_TAGS = [\n 'script',\n 'iframe',\n 'style',\n 'meta',\n 'object',\n 'embed',\n 'form',\n]\n/**\n * Decode HTML entity-encoded angle brackets so tag stripping catches encoded variants.\n * Only decodes < and > (named, decimal, hex) — minimal to avoid false positives.\n */\nfunction decodeAngleBracketEntities(text: string): string {\n return text\n .replace(/&lt;/gi, '<')\n .replace(/&gt;/gi, '>')\n .replace(/&#0*60;/g, '<')\n .replace(/&#0*62;/g, '>')\n .replace(/&#x0*3c;/gi, '<')\n .replace(/&#x0*3e;/gi, '>')\n}\n\n/** Strip paired and standalone instances of the given tag names */\nfunction stripTags(text: string, tags: string[]): string {\n if (!tags.length)\n return text\n const tagGroup = tags.join('|')\n // First strip paired tags with content between them\n const pairedRe = new RegExp(`<(${tagGroup})(\\\\s[^>]*)?>([\\\\s\\\\S]*?)<\\\\/\\\\1>`, 'gi')\n let result = text.replace(pairedRe, '')\n // Then strip any remaining standalone open/close/self-closing tags\n const standaloneRe = new RegExp(`<\\\\/?(${tagGroup})(\\\\s[^>]*)?\\\\/?>`, 'gi')\n result = result.replace(standaloneRe, '')\n return result\n}\n\n/** External image markdown: ![alt](https://...) or ![alt](http://...) */\nconst EXTERNAL_IMAGE_RE = /!\\[([^\\]]*)\\]\\(https?:\\/\\/[^)]+\\)/gi\n\n/**\n * External link markdown: [text](https://...) or [text](http://...)\n * Preserves relative links and anchors.\n */\nconst EXTERNAL_LINK_RE = /\\[([^\\]]*)\\]\\((https?:\\/\\/[^)]+)\\)/gi\n\n/** Dangerous URI protocols in links/images — match entire [text](protocol:...) 
*/\nconst DANGEROUS_PROTOCOL_RE = /!?\\[([^\\]]*)\\]\\(\\s*(javascript|data|vbscript|file)\\s*:[^)]*\\)/gi\nconst DANGEROUS_PROTOCOL_ENCODED_RE = /!?\\[([^\\]]*)\\]\\(\\s*(?:(?:j|%6a|%4a)(?:a|%61|%41)(?:v|%76|%56)(?:a|%61|%41)(?:s|%73|%53)(?:c|%63|%43)(?:r|%72|%52)(?:i|%69|%49)(?:p|%70|%50)(?:t|%74|%54)|(?:d|%64|%44)(?:a|%61|%41)(?:t|%74|%54)(?:a|%61|%41)|(?:v|%76|%56)(?:b|%62|%42)(?:s|%73|%53)(?:c|%63|%43)(?:r|%72|%52)(?:i|%69|%49)(?:p|%70|%50)(?:t|%74|%54))\\s*:[^)]*\\)/gi\n\n/** Directive-style lines that look like agent instructions */\nconst DIRECTIVE_LINE_RE = /^[ \\t]*(SYSTEM|OVERRIDE|INSTRUCTION|NOTE TO AI|IGNORE PREVIOUS|IGNORE ALL PREVIOUS|DISREGARD|FORGET ALL|NEW INSTRUCTIONS?|IMPORTANT SYSTEM|ADMIN OVERRIDE)\\s*[:>].*/gim\n\n/** Base64 blob: 100+ chars of pure base64 alphabet on a single line */\nconst BASE64_BLOB_RE = /^[A-Z0-9+/=]{100,}$/gim\n\n/** Unicode escape spam: 4+ consecutive \\uXXXX sequences */\nconst UNICODE_ESCAPE_SPAM_RE = /(\\\\u[\\dA-Fa-f]{4}){4,}/g\n\n/**\n * Process content outside of fenced code blocks.\n * Uses a line-by-line state machine to properly track fence boundaries,\n * handling nested fences, mismatched lengths, and mixed backtick/tilde fences.\n * Unclosed fences are treated as non-code for security (prevents bypass via malformed fences).\n */\nexport function processOutsideCodeBlocks(content: string, fn: (text: string) => string): string {\n const lines = content.split('\\n')\n const result: string[] = []\n let nonCodeBuffer: string[] = []\n let codeBuffer: string[] = []\n let inCodeBlock = false\n let fenceChar = ''\n let fenceLen = 0\n\n function flushNonCode() {\n if (nonCodeBuffer.length > 0) {\n result.push(fn(nonCodeBuffer.join('\\n')))\n nonCodeBuffer = []\n }\n }\n\n for (const line of lines) {\n const trimmed = line.trimStart()\n\n if (!inCodeBlock) {\n const match = trimmed.match(/^(`{3,}|~{3,})/)\n if (match) {\n flushNonCode()\n inCodeBlock = true\n fenceChar = match[1][0]!\n fenceLen = match[1].length\n 
codeBuffer = [line]\n continue\n }\n nonCodeBuffer.push(line)\n }\n else {\n const match = trimmed.match(/^(`{3,}|~{3,})\\s*$/)\n if (match && match[1][0] === fenceChar && match[1].length >= fenceLen) {\n // Properly closed — emit code block as-is\n result.push(codeBuffer.join('\\n'))\n result.push(line)\n codeBuffer = []\n inCodeBlock = false\n fenceChar = ''\n fenceLen = 0\n continue\n }\n codeBuffer.push(line)\n }\n }\n\n flushNonCode()\n\n // Unclosed fence: treat as non-code so sanitization still applies\n if (inCodeBlock && codeBuffer.length > 0) {\n result.push(fn(codeBuffer.join('\\n')))\n }\n\n return result.join('\\n')\n}\n\n/**\n * Sanitize markdown content to strip prompt injection vectors.\n * Applied at every markdown emission point (cache writes, SKILL.md, search output).\n */\nexport function sanitizeMarkdown(content: string): string {\n if (!content)\n return content\n\n // Layer 1: Strip zero-width characters (global, including in code blocks)\n let result = content.replace(ZERO_WIDTH_RE, '')\n\n // Layer 2: Strip HTML comments (global, including in code blocks)\n result = result.replace(HTML_COMMENT_RE, '')\n\n // Layer 3a: Strip agent directive tags globally (never legitimate, even in code blocks)\n result = stripTags(result, AGENT_DIRECTIVE_TAGS)\n\n // Layers 3b-8: Only outside fenced code blocks\n result = processOutsideCodeBlocks(result, (text) => {\n // Layer 3b: Decode entities + strip remaining dangerous tags (HTML + entity-encoded agent directives)\n let t = decodeAngleBracketEntities(text)\n t = stripTags(t, [...AGENT_DIRECTIVE_TAGS, ...DANGEROUS_HTML_TAGS])\n\n // Layer 4: Strip external images (exfil via query params)\n t = t.replace(EXTERNAL_IMAGE_RE, '')\n\n // Layer 5: Convert external links to plain text\n t = t.replace(EXTERNAL_LINK_RE, '$1')\n\n // Layer 6: Strip dangerous protocols (raw and URL-encoded)\n t = t.replace(DANGEROUS_PROTOCOL_RE, '')\n t = t.replace(DANGEROUS_PROTOCOL_ENCODED_RE, '')\n\n // Layer 7: Strip 
directive-style lines\n t = t.replace(DIRECTIVE_LINE_RE, '')\n\n // Layer 8: Strip encoded payloads\n t = t.replace(BASE64_BLOB_RE, '')\n t = t.replace(UNICODE_ESCAPE_SPAM_RE, '')\n\n return t\n })\n\n return result\n}\n\n// --- Markdown repair ---\n\n/** Heading missing space after #: `##Heading` → `## Heading` */\nconst HEADING_NO_SPACE_RE = /^(#{1,6})([^\\s#])/gm\n\n/** 3+ consecutive blank lines → 2 */\nconst EXCESSIVE_BLANKS_RE = /\\n{4,}/g\n\n/** Trailing whitespace on lines (preserve intentional double-space line breaks) */\nconst TRAILING_WHITESPACE_RE = /[ \\t]+$/gm\n\n/** Emoji at start of line inside a code block — LLM forgot to close the block */\nconst EMOJI_LINE_START_RE = /^\\p{Extended_Pictographic}/u\n\n/**\n * Close unclosed fenced code blocks.\n * Walks line-by-line tracking open/close state.\n */\nfunction closeUnclosedCodeBlocks(content: string): string {\n const lines = content.split('\\n')\n const result: string[] = []\n let inCodeBlock = false\n let fence = ''\n\n for (const line of lines) {\n const trimmed = line.trimStart()\n if (!inCodeBlock) {\n const match = trimmed.match(/^(`{3,}|~{3,})/)\n if (match) {\n inCodeBlock = true\n fence = match[1][0]!.repeat(match[1].length)\n }\n }\n else {\n // Check for closing fence (same char, at least same length)\n const match = trimmed.match(/^(`{3,}|~{3,})\\s*$/)\n if (match && match[1][0] === fence[0] && match[1].length >= fence.length) {\n inCodeBlock = false\n fence = ''\n }\n else {\n // New fence opener inside unclosed block (same char, same length, with lang tag)\n // LLMs commonly forget to close a code block before starting a new one\n const openMatch = trimmed.match(/^(`{3,}|~{3,})\\S/)\n if (openMatch && openMatch[1][0] === fence[0] && openMatch[1].length === fence.length) {\n result.push(fence)\n // fence char/length stays the same since both match\n }\n // Emoji at line start → LLM forgot to close code block before markdown content\n else if (EMOJI_LINE_START_RE.test(trimmed)) {\n 
result.push(fence)\n inCodeBlock = false\n fence = ''\n }\n }\n }\n result.push(line)\n }\n\n // If still inside a code block, close it\n if (inCodeBlock) {\n // Ensure trailing newline before closing fence\n if (result.length > 0 && result[result.length - 1] !== '')\n result.push('')\n result.push(fence)\n }\n\n return result.join('\\n')\n}\n\n/**\n * Remove empty code blocks and deduplicate consecutive identical code blocks.\n * Empty blocks arise when emoji/fence recovery leaves orphaned fences.\n * Duplicate blocks arise when LLMs repeat the same code example.\n */\nfunction cleanupCodeBlocks(content: string): string {\n const lines = content.split('\\n')\n const toRemove = new Set<number>()\n let prevCodeContent: string | undefined\n let i = 0\n\n while (i < lines.length) {\n const trimmed = lines[i]!.trimStart()\n const fm = trimmed.match(/^(`{3,}|~{3,})/)\n if (!fm) {\n // Non-blank text between code blocks resets dedup tracking\n if (trimmed)\n prevCodeContent = undefined\n i++\n continue\n }\n\n const fChar = fm[1][0]!\n const fLen = fm[1].length\n const openIdx = i\n i++\n\n let closeIdx = -1\n while (i < lines.length) {\n const ct = lines[i]!.trimStart()\n const cm = ct.match(/^(`{3,}|~{3,})\\s*$/)\n if (cm && cm[1][0] === fChar && cm[1].length >= fLen) {\n closeIdx = i\n i++\n break\n }\n i++\n }\n\n if (closeIdx === -1)\n continue\n\n const inner = lines.slice(openIdx + 1, closeIdx).join('\\n').trim()\n\n if (!inner) {\n for (let j = openIdx; j <= closeIdx; j++) toRemove.add(j)\n }\n else if (inner === prevCodeContent) {\n for (let j = openIdx; j <= closeIdx; j++) toRemove.add(j)\n }\n else {\n prevCodeContent = inner\n }\n }\n\n if (!toRemove.size)\n return content\n return lines.filter((_, idx) => !toRemove.has(idx)).join('\\n')\n}\n\n/**\n * Close unclosed inline code spans.\n * Scans each line for unmatched backtick(s) and appends closing backtick(s).\n * Tracks fenced code blocks internally to handle any fence length.\n */\nfunction 
closeUnclosedInlineCode(content: string): string {\n const lines = content.split('\\n')\n let inFence = false\n let fenceChar = ''\n let fenceLen = 0\n\n return lines.map((line) => {\n const trimmed = line.trimStart()\n if (!inFence) {\n const m = trimmed.match(/^(`{3,}|~{3,})/)\n if (m) {\n inFence = true\n fenceChar = m[1][0]!\n fenceLen = m[1].length\n return line\n }\n }\n else {\n const m = trimmed.match(/^(`{3,}|~{3,})\\s*$/)\n if (m && m[1][0] === fenceChar && m[1].length >= fenceLen) {\n inFence = false\n }\n return line\n }\n\n // Outside fenced code blocks — fix unclosed inline backticks\n let i = 0\n while (i < line.length) {\n if (line[i] === '`') {\n const seqStart = i\n while (i < line.length && line[i] === '`') i++\n const seqLen = i - seqStart\n let found = false\n let j = i\n while (j < line.length) {\n if (line[j] === '`') {\n const closeStart = j\n while (j < line.length && line[j] === '`') j++\n if (j - closeStart === seqLen) {\n found = true\n i = j\n break\n }\n }\n else {\n j++\n }\n }\n if (!found) {\n line = `${line}${'`'.repeat(seqLen)}`\n i = line.length\n }\n }\n else {\n i++\n }\n }\n return line\n }).join('\\n')\n}\n\n/**\n * Repair broken markdown syntax.\n * Fixes common issues in fetched documentation:\n * - Unclosed fenced code blocks\n * - Unclosed inline code spans\n * - Missing space after heading # markers\n * - Excessive consecutive blank lines\n * - Trailing whitespace\n */\nexport function repairMarkdown(content: string): string {\n if (!content)\n return content\n\n let result = content\n\n // Fix unclosed fenced code blocks (must run before other line-level fixes)\n result = closeUnclosedCodeBlocks(result)\n\n // Remove empty and duplicate code blocks (artifacts from fence recovery)\n result = cleanupCodeBlocks(result)\n\n // Fix unclosed inline code spans\n result = closeUnclosedInlineCode(result)\n\n // Fix heading spacing (only outside code blocks)\n result = processOutsideCodeBlocks(result, text =>\n 
text.replace(HEADING_NO_SPACE_RE, '$1 $2'))\n\n // Normalize excessive blank lines\n result = result.replace(EXCESSIVE_BLANKS_RE, '\\n\\n\\n')\n\n // Strip trailing whitespace\n result = result.replace(TRAILING_WHITESPACE_RE, '')\n\n return result\n}\n","/**\n * Cache storage operations\n */\n\nimport type { CachedDoc, CachedPackage } from './types'\nimport { existsSync, lstatSync, mkdirSync, readdirSync, readFileSync, rmSync, statSync, symlinkSync, unlinkSync, writeFileSync } from 'node:fs'\nimport { basename, join, resolve } from 'pathe'\nimport { sanitizeMarkdown } from '../core/sanitize'\nimport { REFERENCES_DIR } from './config'\nimport { getCacheDir } from './version'\n\n/** Safely create a symlink, validating target is under REFERENCES_DIR */\nfunction safeSymlink(target: string, linkPath: string): void {\n const resolved = resolve(target)\n if (!resolved.startsWith(REFERENCES_DIR))\n throw new Error(`Symlink target outside references dir: ${resolved}`)\n // Remove pre-existing symlink (check with lstat to detect symlinks)\n try {\n const stat = lstatSync(linkPath)\n if (stat.isSymbolicLink() || stat.isFile())\n unlinkSync(linkPath)\n }\n catch {}\n symlinkSync(target, linkPath, 'junction')\n}\n\n/**\n * Check if package is cached at given version\n */\nexport function isCached(name: string, version: string): boolean {\n return existsSync(getCacheDir(name, version))\n}\n\n/**\n * Ensure cache directories exist\n */\nexport function ensureCacheDir(): void {\n mkdirSync(REFERENCES_DIR, { recursive: true, mode: 0o700 })\n}\n\n/**\n * Write docs to cache, cleaning stale version dirs for the same package\n */\nexport function writeToCache(\n name: string,\n version: string,\n docs: CachedDoc[],\n): string {\n const cacheDir = getCacheDir(name, version)\n mkdirSync(cacheDir, { recursive: true, mode: 0o700 })\n\n // Clean stale cache dirs for same package with different version keys\n cleanStaleCacheDirs(name, version)\n\n for (const doc of docs) {\n const filePath 
= join(cacheDir, doc.path)\n mkdirSync(join(filePath, '..'), { recursive: true, mode: 0o700 })\n writeFileSync(filePath, sanitizeMarkdown(doc.content), { mode: 0o600 })\n }\n\n return cacheDir\n}\n\n/**\n * Remove stale cache dirs for same package but different version keys\n * e.g. @clack/prompts@1.0 vs @clack/prompts@1.0.0\n */\nfunction cleanStaleCacheDirs(name: string, version: string): void {\n const prefix = `${name}@`\n\n // For scoped packages, check inside the scope dir\n if (name.startsWith('@')) {\n const [scope, pkg] = name.split('/')\n const scopeDir = join(REFERENCES_DIR, scope!)\n if (!existsSync(scopeDir))\n return\n\n const scopePrefix = `${pkg}@`\n const currentDirName = basename(getCacheDir(name, version))\n\n for (const entry of readdirSync(scopeDir)) {\n if (entry.startsWith(scopePrefix) && entry !== currentDirName) {\n rmSync(join(scopeDir, entry), { recursive: true, force: true })\n }\n }\n }\n else {\n if (!existsSync(REFERENCES_DIR))\n return\n for (const entry of readdirSync(REFERENCES_DIR)) {\n if (entry.startsWith(prefix) && entry !== basename(getCacheDir(name, version))) {\n rmSync(join(REFERENCES_DIR, entry), { recursive: true, force: true })\n }\n }\n }\n}\n\n/**\n * Create .skilld directory with symlinked docs (only if external fetch needed)\n *\n * Structure:\n * .claude/skills/<skill>/.skilld/\n * pkg -> node_modules/<pkg> (always, has package.json, README.md, dist/)\n * docs -> ~/.skilld/references/<pkg>@<version>/docs (only if fetched externally)\n *\n * The .skilld/ dirs are gitignored. 
After clone, `skilld install` recreates from lockfile.\n */\nexport function linkReferences(skillDir: string, name: string, version: string): void {\n const cacheDir = getCacheDir(name, version)\n const referencesDir = join(skillDir, '.skilld')\n const docsLinkPath = join(referencesDir, 'docs')\n const cachedDocsPath = join(cacheDir, 'docs')\n\n // Create references dir if needed\n mkdirSync(referencesDir, { recursive: true })\n\n // Symlink docs from cache\n if (existsSync(cachedDocsPath)) {\n safeSymlink(cachedDocsPath, docsLinkPath)\n }\n}\n\n/**\n * Create symlink from .skilld dir to cached issues\n *\n * Structure:\n * .claude/skills/<skill>/.skilld/issues -> ~/.skilld/references/<pkg>@<version>/issues\n */\nexport function linkIssues(skillDir: string, name: string, version: string): void {\n const cacheDir = getCacheDir(name, version)\n const referencesDir = join(skillDir, '.skilld')\n const linkPath = join(referencesDir, 'issues')\n const cachedPath = join(cacheDir, 'issues')\n\n mkdirSync(referencesDir, { recursive: true })\n\n if (existsSync(cachedPath)) {\n safeSymlink(cachedPath, linkPath)\n }\n}\n\n/**\n * Create symlink from .skilld dir to cached discussions\n *\n * Structure:\n * .claude/skills/<skill>/.skilld/discussions -> ~/.skilld/references/<pkg>@<version>/discussions\n */\nexport function linkDiscussions(skillDir: string, name: string, version: string): void {\n const cacheDir = getCacheDir(name, version)\n const referencesDir = join(skillDir, '.skilld')\n const linkPath = join(referencesDir, 'discussions')\n const cachedPath = join(cacheDir, 'discussions')\n\n mkdirSync(referencesDir, { recursive: true })\n\n if (existsSync(cachedPath)) {\n safeSymlink(cachedPath, linkPath)\n }\n}\n\n/**\n * Resolve the package directory: node_modules first, then cached dist fallback.\n * Returns the path if found, null otherwise.\n */\nexport function resolvePkgDir(name: string, cwd: string, version?: string): string | null {\n const nodeModulesPath = 
join(cwd, 'node_modules', name)\n if (existsSync(nodeModulesPath))\n return nodeModulesPath\n\n // Fallback: check cached npm dist\n if (version) {\n const cachedPkgDir = join(getCacheDir(name, version), 'pkg')\n if (existsSync(join(cachedPkgDir, 'package.json')))\n return cachedPkgDir\n }\n\n return null\n}\n\n/**\n * Create symlink from .skilld dir to package directory\n *\n * Structure:\n * .claude/skills/<skill>/.skilld/pkg -> node_modules/<pkg> OR ~/.skilld/references/<pkg>@<version>/pkg\n *\n * This gives access to package.json, README.md, dist/, and any shipped docs/\n */\nexport function linkPkg(skillDir: string, name: string, cwd: string, version?: string): void {\n const pkgPath = resolvePkgDir(name, cwd, version)\n if (!pkgPath)\n return\n\n const referencesDir = join(skillDir, '.skilld')\n mkdirSync(referencesDir, { recursive: true })\n\n const pkgLinkPath = join(referencesDir, 'pkg')\n if (existsSync(pkgLinkPath)) {\n unlinkSync(pkgLinkPath)\n }\n symlinkSync(pkgPath, pkgLinkPath, 'junction')\n}\n\n/**\n * Create named symlink from .skilld dir to package directory.\n * Short name = last segment of package name (e.g., @vue/reactivity → pkg-reactivity)\n *\n * Structure:\n * .claude/skills/<skill>/.skilld/pkg-<short> -> node_modules/<pkg>\n */\nexport function linkPkgNamed(skillDir: string, name: string, cwd: string, version?: string): void {\n const pkgPath = resolvePkgDir(name, cwd, version)\n if (!pkgPath)\n return\n\n const shortName = name.split('/').pop()!.toLowerCase()\n const referencesDir = join(skillDir, '.skilld')\n mkdirSync(referencesDir, { recursive: true })\n\n const linkPath = join(referencesDir, `pkg-${shortName}`)\n if (existsSync(linkPath))\n unlinkSync(linkPath)\n symlinkSync(pkgPath, linkPath, 'junction')\n}\n\n/**\n * Get key files from a package directory for display\n * Returns entry points + docs files\n */\nexport function getPkgKeyFiles(name: string, cwd: string, version?: string): string[] {\n const pkgPath = 
resolvePkgDir(name, cwd, version)\n if (!pkgPath)\n return []\n\n const files: string[] = []\n const pkgJsonPath = join(pkgPath, 'package.json')\n\n if (existsSync(pkgJsonPath)) {\n const pkg = JSON.parse(readFileSync(pkgJsonPath, 'utf-8'))\n\n // Entry points\n if (pkg.main)\n files.push(basename(pkg.main))\n if (pkg.module && pkg.module !== pkg.main)\n files.push(basename(pkg.module))\n }\n\n // Check for common doc files (case-insensitive readme match)\n const entries = readdirSync(pkgPath).filter(f =>\n /^readme\\.md$/i.test(f) || /^changelog\\.md$/i.test(f),\n )\n files.push(...entries)\n\n return [...new Set(files)]\n}\n\n/**\n * Check if package ships its own docs folder\n */\nexport interface ShippedSkill {\n skillName: string\n skillDir: string\n}\n\n/**\n * Check if package ships a skills/ directory with SKILL.md or _SKILL.md subdirs\n */\nexport function getShippedSkills(name: string, cwd: string, version?: string): ShippedSkill[] {\n const pkgPath = resolvePkgDir(name, cwd, version)\n if (!pkgPath)\n return []\n\n const skillsPath = join(pkgPath, 'skills')\n if (!existsSync(skillsPath))\n return []\n\n return readdirSync(skillsPath, { withFileTypes: true })\n .filter(d => d.isDirectory() && (existsSync(join(skillsPath, d.name, 'SKILL.md')) || existsSync(join(skillsPath, d.name, '_SKILL.md'))))\n .map(d => ({ skillName: d.name, skillDir: join(skillsPath, d.name) }))\n}\n\n/**\n * Create symlink from .skilld dir to cached sections (LLM-generated)\n *\n * Structure:\n * .claude/skills/<skill>/.skilld/sections -> ~/.skilld/references/<pkg>@<version>/sections\n */\nexport function linkSections(skillDir: string, name: string, version: string): void {\n const cacheDir = getCacheDir(name, version)\n const referencesDir = join(skillDir, '.skilld')\n const linkPath = join(referencesDir, 'sections')\n const cachedPath = join(cacheDir, 'sections')\n\n mkdirSync(referencesDir, { recursive: true })\n\n if (existsSync(cachedPath)) {\n safeSymlink(cachedPath, 
linkPath)\n }\n}\n\n/**\n * Write LLM-generated section outputs to global cache for cross-project reuse\n *\n * Structure:\n * ~/.skilld/references/<pkg>@<version>/sections/_BEST_PRACTICES.md\n */\nexport function writeSections(name: string, version: string, sections: Array<{ file: string, content: string }>): void {\n const cacheDir = getCacheDir(name, version)\n const sectionsDir = join(cacheDir, 'sections')\n mkdirSync(sectionsDir, { recursive: true, mode: 0o700 })\n for (const { file, content } of sections) {\n writeFileSync(join(sectionsDir, file), content, { mode: 0o600 })\n }\n}\n\n/**\n * Read a cached section from the global references dir\n */\nexport function readCachedSection(name: string, version: string, file: string): string | null {\n const path = join(getCacheDir(name, version), 'sections', file)\n if (!existsSync(path))\n return null\n return readFileSync(path, 'utf-8')\n}\n\n/**\n * Create symlink from .skilld dir to cached releases\n *\n * Structure:\n * .claude/skills/<skill>/.skilld/releases -> ~/.skilld/references/<pkg>@<version>/releases\n */\nexport function linkReleases(skillDir: string, name: string, version: string): void {\n const cacheDir = getCacheDir(name, version)\n const referencesDir = join(skillDir, '.skilld')\n const releasesLinkPath = join(referencesDir, 'releases')\n const cachedReleasesPath = join(cacheDir, 'releases')\n\n mkdirSync(referencesDir, { recursive: true })\n\n if (existsSync(releasesLinkPath)) {\n unlinkSync(releasesLinkPath)\n }\n if (existsSync(cachedReleasesPath)) {\n symlinkSync(cachedReleasesPath, releasesLinkPath, 'junction')\n }\n}\n\n/**\n * Create symlink from skills dir to shipped skill dir\n */\nexport function linkShippedSkill(baseDir: string, skillName: string, targetDir: string): void {\n const linkPath = join(baseDir, skillName)\n if (existsSync(linkPath)) {\n const stat = lstatSync(linkPath)\n if (stat.isSymbolicLink())\n unlinkSync(linkPath)\n else rmSync(linkPath, { recursive: true, force: true 
})\n }\n symlinkSync(targetDir, linkPath)\n}\n\nexport function hasShippedDocs(name: string, cwd: string, version?: string): boolean {\n const pkgPath = resolvePkgDir(name, cwd, version)\n if (!pkgPath)\n return false\n\n const docsCandidates = ['docs', 'documentation', 'doc']\n for (const candidate of docsCandidates) {\n const docsPath = join(pkgPath, candidate)\n if (existsSync(docsPath))\n return true\n }\n return false\n}\n\n/**\n * List all cached packages\n */\nexport function listCached(): CachedPackage[] {\n if (!existsSync(REFERENCES_DIR))\n return []\n\n return readdirSync(REFERENCES_DIR)\n .filter(name => name.includes('@'))\n .map((dir) => {\n const [name, version] = dir.split('@')\n return { name: name!, version: version!, dir: join(REFERENCES_DIR, dir) }\n })\n}\n\n/**\n * Read cached docs for a package\n */\nexport function readCachedDocs(name: string, version: string): CachedDoc[] {\n const cacheDir = getCacheDir(name, version)\n if (!existsSync(cacheDir))\n return []\n\n const docs: CachedDoc[] = []\n\n function walk(dir: string, prefix = '') {\n for (const entry of readdirSync(dir, { withFileTypes: true })) {\n const entryPath = join(dir, entry.name)\n const relativePath = prefix ? 
`${prefix}/${entry.name}` : entry.name\n\n if (entry.isDirectory()) {\n walk(entryPath, relativePath)\n }\n else if (entry.name.endsWith('.md') || entry.name.endsWith('.mdx')) {\n docs.push({\n path: relativePath,\n content: readFileSync(entryPath, 'utf-8'),\n })\n }\n }\n }\n\n walk(cacheDir)\n return docs\n}\n\n/**\n * Clear cache for a specific package\n */\nexport function clearCache(name: string, version: string): boolean {\n const cacheDir = getCacheDir(name, version)\n if (!existsSync(cacheDir))\n return false\n\n rmSync(cacheDir, { recursive: true })\n return true\n}\n\n/**\n * Clear all cache\n */\nexport function clearAllCache(): number {\n const packages = listCached()\n for (const pkg of packages) {\n clearCache(pkg.name, pkg.version)\n }\n return packages.length\n}\n\n/**\n * List files in .skilld directory (pkg + docs) as relative paths for prompt context\n * Returns paths like ./.skilld/pkg/README.md, ./.skilld/docs/api.md\n */\nexport function listReferenceFiles(skillDir: string, maxDepth = 3): string[] {\n const referencesDir = join(skillDir, '.skilld')\n if (!existsSync(referencesDir))\n return []\n\n const files: string[] = []\n\n function walk(dir: string, depth: number) {\n if (depth > maxDepth)\n return\n try {\n for (const entry of readdirSync(dir, { withFileTypes: true })) {\n const full = join(dir, entry.name)\n if (entry.isDirectory() || entry.isSymbolicLink()) {\n try {\n const stat = statSync(full)\n if (stat.isDirectory()) {\n walk(full, depth + 1)\n continue\n }\n }\n catch { continue }\n }\n if (entry.name.endsWith('.md')) {\n files.push(full)\n }\n }\n }\n catch {\n // Broken symlink or permission error\n }\n }\n\n walk(referencesDir, 0)\n return 
files\n}\n"],"mappings":";;;AAYA,MAAM,gBAAgB;AAGtB,MAAM,kBAAkB;AAMxB,MAAM,uBAAuB;CAC3B;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACD;AAMD,MAAM,sBAAsB;CAC1B;CACA;CACA;CACA;CACA;CACA;CACA;CACD;AAKD,SAAS,2BAA2B,MAAsB;AACxD,QAAO,KACJ,QAAQ,UAAU,IAAI,CACtB,QAAQ,UAAU,IAAI,CACtB,QAAQ,YAAY,IAAI,CACxB,QAAQ,YAAY,IAAI,CACxB,QAAQ,cAAc,IAAI,CAC1B,QAAQ,cAAc,IAAI;;AAI/B,SAAS,UAAU,MAAc,MAAwB;AACvD,KAAI,CAAC,KAAK,OACR,QAAO;CACT,MAAM,WAAW,KAAK,KAAK,IAAI;CAE/B,MAAM,WAAW,IAAI,OAAO,KAAK,SAAS,oCAAoC,KAAK;CACnF,IAAI,SAAS,KAAK,QAAQ,UAAU,GAAG;CAEvC,MAAM,eAAe,IAAI,OAAO,SAAS,SAAS,oBAAoB,KAAK;AAC3E,UAAS,OAAO,QAAQ,cAAc,GAAG;AACzC,QAAO;;AAIT,MAAM,oBAAoB;AAM1B,MAAM,mBAAmB;AAGzB,MAAM,wBAAwB;AAC9B,MAAM,gCAAgC;AAGtC,MAAM,oBAAoB;AAG1B,MAAM,iBAAiB;AAGvB,MAAM,yBAAyB;AAQ/B,SAAgB,yBAAyB,SAAiB,IAAsC;CAC9F,MAAM,QAAQ,QAAQ,MAAM,KAAK;CACjC,MAAM,SAAmB,EAAE;CAC3B,IAAI,gBAA0B,EAAE;CAChC,IAAI,aAAuB,EAAE;CAC7B,IAAI,cAAc;CAClB,IAAI,YAAY;CAChB,IAAI,WAAW;CAEf,SAAS,eAAe;AACtB,MAAI,cAAc,SAAS,GAAG;AAC5B,UAAO,KAAK,GAAG,cAAc,KAAK,KAAK,CAAC,CAAC;AACzC,mBAAgB,EAAE;;;AAItB,MAAK,MAAM,QAAQ,OAAO;EACxB,MAAM,UAAU,KAAK,WAAW;AAEhC,MAAI,CAAC,aAAa;GAChB,MAAM,QAAQ,QAAQ,MAAM,iBAAiB;AAC7C,OAAI,OAAO;AACT,kBAAc;AACd,kBAAc;AACd,gBAAY,MAAM,GAAG;AACrB,eAAW,MAAM,GAAG;AACpB,iBAAa,CAAC,KAAK;AACnB;;AAEF,iBAAc,KAAK,KAAK;SAErB;GACH,MAAM,QAAQ,QAAQ,MAAM,qBAAqB;AACjD,OAAI,SAAS,MAAM,GAAG,OAAO,aAAa,MAAM,GAAG,UAAU,UAAU;AAErE,WAAO,KAAK,WAAW,KAAK,KAAK,CAAC;AAClC,WAAO,KAAK,KAAK;AACjB,iBAAa,EAAE;AACf,kBAAc;AACd,gBAAY;AACZ,eAAW;AACX;;AAEF,cAAW,KAAK,KAAK;;;AAIzB,eAAc;AAGd,KAAI,eAAe,WAAW,SAAS,EACrC,QAAO,KAAK,GAAG,WAAW,KAAK,KAAK,CAAC,CAAC;AAGxC,QAAO,OAAO,KAAK,KAAK;;AAO1B,SAAgB,iBAAiB,SAAyB;AACxD,KAAI,CAAC,QACH,QAAO;CAGT,IAAI,SAAS,QAAQ,QAAQ,eAAe,GAAG;AAG/C,UAAS,OAAO,QAAQ,iBAAiB,GAAG;AAG5C,UAAS,UAAU,QAAQ,qBAAqB;AAGhD,UAAS,yBAAyB,SAAS,SAAS;EAElD,IAAI,IAAI,2BAA2B,KAAK;AACxC,MAAI,UAAU,GAAG,CAAC,GAAG,sBAAsB,GAAG,oBAAoB,CAAC;AAGnE,MAAI,EAAE,QAAQ,mBAAmB,GAAG;AAGpC,MAAI,EAAE,QAAQ,kBAAkB,KAAK;AAGrC,MAAI,EAAE,QAAQ,uBAAuB,GAAG;AACxC,MAAI,EAAE,QAAQ,+BAA+B,
GAAG;AAGhD,MAAI,EAAE,QAAQ,mBAAmB,GAAG;AAGpC,MAAI,EAAE,QAAQ,gBAAgB,GAAG;AACjC,MAAI,EAAE,QAAQ,wBAAwB,GAAG;AAEzC,SAAO;GACP;AAEF,QAAO;;AAMT,MAAM,sBAAsB;AAG5B,MAAM,sBAAsB;AAG5B,MAAM,yBAAyB;AAG/B,MAAM,sBAAsB;AAM5B,SAAS,wBAAwB,SAAyB;CACxD,MAAM,QAAQ,QAAQ,MAAM,KAAK;CACjC,MAAM,SAAmB,EAAE;CAC3B,IAAI,cAAc;CAClB,IAAI,QAAQ;AAEZ,MAAK,MAAM,QAAQ,OAAO;EACxB,MAAM,UAAU,KAAK,WAAW;AAChC,MAAI,CAAC,aAAa;GAChB,MAAM,QAAQ,QAAQ,MAAM,iBAAiB;AAC7C,OAAI,OAAO;AACT,kBAAc;AACd,YAAQ,MAAM,GAAG,GAAI,OAAO,MAAM,GAAG,OAAO;;SAG3C;GAEH,MAAM,QAAQ,QAAQ,MAAM,qBAAqB;AACjD,OAAI,SAAS,MAAM,GAAG,OAAO,MAAM,MAAM,MAAM,GAAG,UAAU,MAAM,QAAQ;AACxE,kBAAc;AACd,YAAQ;UAEL;IAGH,MAAM,YAAY,QAAQ,MAAM,mBAAmB;AACnD,QAAI,aAAa,UAAU,GAAG,OAAO,MAAM,MAAM,UAAU,GAAG,WAAW,MAAM,OAC7E,QAAO,KAAK,MAAM;aAIX,oBAAoB,KAAK,QAAQ,EAAE;AAC1C,YAAO,KAAK,MAAM;AAClB,mBAAc;AACd,aAAQ;;;;AAId,SAAO,KAAK,KAAK;;AAInB,KAAI,aAAa;AAEf,MAAI,OAAO,SAAS,KAAK,OAAO,OAAO,SAAS,OAAO,GACrD,QAAO,KAAK,GAAG;AACjB,SAAO,KAAK,MAAM;;AAGpB,QAAO,OAAO,KAAK,KAAK;;AAQ1B,SAAS,kBAAkB,SAAyB;CAClD,MAAM,QAAQ,QAAQ,MAAM,KAAK;CACjC,MAAM,2BAAW,IAAI,KAAa;CAClC,IAAI;CACJ,IAAI,IAAI;AAER,QAAO,IAAI,MAAM,QAAQ;EACvB,MAAM,UAAU,MAAM,GAAI,WAAW;EACrC,MAAM,KAAK,QAAQ,MAAM,iBAAiB;AAC1C,MAAI,CAAC,IAAI;AAEP,OAAI,QACF,mBAAkB,KAAA;AACpB;AACA;;EAGF,MAAM,QAAQ,GAAG,GAAG;EACpB,MAAM,OAAO,GAAG,GAAG;EACnB,MAAM,UAAU;AAChB;EAEA,IAAI,WAAW;AACf,SAAO,IAAI,MAAM,QAAQ;GAEvB,MAAM,KADK,MAAM,GAAI,WAAW,CAClB,MAAM,qBAAqB;AACzC,OAAI,MAAM,GAAG,GAAG,OAAO,SAAS,GAAG,GAAG,UAAU,MAAM;AACpD,eAAW;AACX;AACA;;AAEF;;AAGF,MAAI,aAAa,GACf;EAEF,MAAM,QAAQ,MAAM,MAAM,UAAU,GAAG,SAAS,CAAC,KAAK,KAAK,CAAC,MAAM;AAElE,MAAI,CAAC,MACH,MAAK,IAAI,IAAI,SAAS,KAAK,UAAU,IAAK,UAAS,IAAI,EAAE;WAElD,UAAU,gBACjB,MAAK,IAAI,IAAI,SAAS,KAAK,UAAU,IAAK,UAAS,IAAI,EAAE;MAGzD,mBAAkB;;AAItB,KAAI,CAAC,SAAS,KACZ,QAAO;AACT,QAAO,MAAM,QAAQ,GAAG,QAAQ,CAAC,SAAS,IAAI,IAAI,CAAC,CAAC,KAAK,KAAK;;AAQhE,SAAS,wBAAwB,SAAyB;CACxD,MAAM,QAAQ,QAAQ,MAAM,KAAK;CACjC,IAAI,UAAU;CACd,IAAI,YAAY;CAChB,IAAI,WAAW;AAEf,QAAO,MAAM,KAAK,SAAS;EACzB,MAAM,UAAU,KAAK,WAAW;AAChC,MAAI,CAAC,SAAS;GACZ,MAAM,IAAI,QAAQ,MAAM
,iBAAiB;AACzC,OAAI,GAAG;AACL,cAAU;AACV,gBAAY,EAAE,GAAG;AACjB,eAAW,EAAE,GAAG;AAChB,WAAO;;SAGN;GACH,MAAM,IAAI,QAAQ,MAAM,qBAAqB;AAC7C,OAAI,KAAK,EAAE,GAAG,OAAO,aAAa,EAAE,GAAG,UAAU,SAC/C,WAAU;AAEZ,UAAO;;EAIT,IAAI,IAAI;AACR,SAAO,IAAI,KAAK,OACd,KAAI,KAAK,OAAO,KAAK;GACnB,MAAM,WAAW;AACjB,UAAO,IAAI,KAAK,UAAU,KAAK,OAAO,IAAK;GAC3C,MAAM,SAAS,IAAI;GACnB,IAAI,QAAQ;GACZ,IAAI,IAAI;AACR,UAAO,IAAI,KAAK,OACd,KAAI,KAAK,OAAO,KAAK;IACnB,MAAM,aAAa;AACnB,WAAO,IAAI,KAAK,UAAU,KAAK,OAAO,IAAK;AAC3C,QAAI,IAAI,eAAe,QAAQ;AAC7B,aAAQ;AACR,SAAI;AACJ;;SAIF;AAGJ,OAAI,CAAC,OAAO;AACV,WAAO,GAAG,OAAO,IAAI,OAAO,OAAO;AACnC,QAAI,KAAK;;QAIX;AAGJ,SAAO;GACP,CAAC,KAAK,KAAK;;AAYf,SAAgB,eAAe,SAAyB;AACtD,KAAI,CAAC,QACH,QAAO;CAET,IAAI,SAAS;AAGb,UAAS,wBAAwB,OAAO;AAGxC,UAAS,kBAAkB,OAAO;AAGlC,UAAS,wBAAwB,OAAO;AAGxC,UAAS,yBAAyB,SAAQ,SACxC,KAAK,QAAQ,qBAAqB,QAAQ,CAAC;AAG7C,UAAS,OAAO,QAAQ,qBAAqB,SAAS;AAGtD,UAAS,OAAO,QAAQ,wBAAwB,GAAG;AAEnD,QAAO;;AC3aT,SAAS,YAAY,QAAgB,UAAwB;CAC3D,MAAM,WAAW,QAAQ,OAAO;AAChC,KAAI,CAAC,SAAS,WAAW,eAAe,CACtC,OAAM,IAAI,MAAM,0CAA0C,WAAW;AAEvE,KAAI;EACF,MAAM,OAAO,UAAU,SAAS;AAChC,MAAI,KAAK,gBAAgB,IAAI,KAAK,QAAQ,CACxC,YAAW,SAAS;SAElB;AACN,aAAY,QAAQ,UAAU,WAAW;;AAM3C,SAAgB,SAAS,MAAc,SAA0B;AAC/D,QAAO,WAAW,YAAY,MAAM,QAAQ,CAAC;;AAM/C,SAAgB,iBAAuB;AACrC,WAAU,gBAAgB;EAAE,WAAW;EAAM,MAAM;EAAO,CAAC;;AAM7D,SAAgB,aACd,MACA,SACA,MACQ;CACR,MAAM,WAAW,YAAY,MAAM,QAAQ;AAC3C,WAAU,UAAU;EAAE,WAAW;EAAM,MAAM;EAAO,CAAC;AAGrD,qBAAoB,MAAM,QAAQ;AAElC,MAAK,MAAM,OAAO,MAAM;EACtB,MAAM,WAAW,KAAK,UAAU,IAAI,KAAK;AACzC,YAAU,KAAK,UAAU,KAAK,EAAE;GAAE,WAAW;GAAM,MAAM;GAAO,CAAC;AACjE,gBAAc,UAAU,iBAAiB,IAAI,QAAQ,EAAE,EAAE,MAAM,KAAO,CAAC;;AAGzE,QAAO;;AAOT,SAAS,oBAAoB,MAAc,SAAuB;CAChE,MAAM,SAAS,GAAG,KAAK;AAGvB,KAAI,KAAK,WAAW,IAAI,EAAE;EACxB,MAAM,CAAC,OAAO,OAAO,KAAK,MAAM,IAAI;EACpC,MAAM,WAAW,KAAK,gBAAgB,MAAO;AAC7C,MAAI,CAAC,WAAW,SAAS,CACvB;EAEF,MAAM,cAAc,GAAG,IAAI;EAC3B,MAAM,iBAAiB,SAAS,YAAY,MAAM,QAAQ,CAAC;AAE3D,OAAK,MAAM,SAAS,YAAY,SAAS,CACvC,KAAI,MAAM,WAAW,YAAY,IAAI,UAAU,eAC7C,QAAO,KAAK,UAAU,MAAM,EAAE;GAAE,WAAW;GAAM,OAAO;GAAM,CAAC;QAIhE;AACH,MAA
I,CAAC,WAAW,eAAe,CAC7B;AACF,OAAK,MAAM,SAAS,YAAY,eAAe,CAC7C,KAAI,MAAM,WAAW,OAAO,IAAI,UAAU,SAAS,YAAY,MAAM,QAAQ,CAAC,CAC5E,QAAO,KAAK,gBAAgB,MAAM,EAAE;GAAE,WAAW;GAAM,OAAO;GAAM,CAAC;;;AAgB7E,SAAgB,eAAe,UAAkB,MAAc,SAAuB;CACpF,MAAM,WAAW,YAAY,MAAM,QAAQ;CAC3C,MAAM,gBAAgB,KAAK,UAAU,UAAU;CAC/C,MAAM,eAAe,KAAK,eAAe,OAAO;CAChD,MAAM,iBAAiB,KAAK,UAAU,OAAO;AAG7C,WAAU,eAAe,EAAE,WAAW,MAAM,CAAC;AAG7C,KAAI,WAAW,eAAe,CAC5B,aAAY,gBAAgB,aAAa;;AAU7C,SAAgB,WAAW,UAAkB,MAAc,SAAuB;CAChF,MAAM,WAAW,YAAY,MAAM,QAAQ;CAC3C,MAAM,gBAAgB,KAAK,UAAU,UAAU;CAC/C,MAAM,WAAW,KAAK,eAAe,SAAS;CAC9C,MAAM,aAAa,KAAK,UAAU,SAAS;AAE3C,WAAU,eAAe,EAAE,WAAW,MAAM,CAAC;AAE7C,KAAI,WAAW,WAAW,CACxB,aAAY,YAAY,SAAS;;AAUrC,SAAgB,gBAAgB,UAAkB,MAAc,SAAuB;CACrF,MAAM,WAAW,YAAY,MAAM,QAAQ;CAC3C,MAAM,gBAAgB,KAAK,UAAU,UAAU;CAC/C,MAAM,WAAW,KAAK,eAAe,cAAc;CACnD,MAAM,aAAa,KAAK,UAAU,cAAc;AAEhD,WAAU,eAAe,EAAE,WAAW,MAAM,CAAC;AAE7C,KAAI,WAAW,WAAW,CACxB,aAAY,YAAY,SAAS;;AAQrC,SAAgB,cAAc,MAAc,KAAa,SAAiC;CACxF,MAAM,kBAAkB,KAAK,KAAK,gBAAgB,KAAK;AACvD,KAAI,WAAW,gBAAgB,CAC7B,QAAO;AAGT,KAAI,SAAS;EACX,MAAM,eAAe,KAAK,YAAY,MAAM,QAAQ,EAAE,MAAM;AAC5D,MAAI,WAAW,KAAK,cAAc,eAAe,CAAC,CAChD,QAAO;;AAGX,QAAO;;AAWT,SAAgB,QAAQ,UAAkB,MAAc,KAAa,SAAwB;CAC3F,MAAM,UAAU,cAAc,MAAM,KAAK,QAAQ;AACjD,KAAI,CAAC,QACH;CAEF,MAAM,gBAAgB,KAAK,UAAU,UAAU;AAC/C,WAAU,eAAe,EAAE,WAAW,MAAM,CAAC;CAE7C,MAAM,cAAc,KAAK,eAAe,MAAM;AAC9C,KAAI,WAAW,YAAY,CACzB,YAAW,YAAY;AAEzB,aAAY,SAAS,aAAa,WAAW;;AAU/C,SAAgB,aAAa,UAAkB,MAAc,KAAa,SAAwB;CAChG,MAAM,UAAU,cAAc,MAAM,KAAK,QAAQ;AACjD,KAAI,CAAC,QACH;CAEF,MAAM,YAAY,KAAK,MAAM,IAAI,CAAC,KAAK,CAAE,aAAa;CACtD,MAAM,gBAAgB,KAAK,UAAU,UAAU;AAC/C,WAAU,eAAe,EAAE,WAAW,MAAM,CAAC;CAE7C,MAAM,WAAW,KAAK,eAAe,OAAO,YAAY;AACxD,KAAI,WAAW,SAAS,CACtB,YAAW,SAAS;AACtB,aAAY,SAAS,UAAU,WAAW;;AAO5C,SAAgB,eAAe,MAAc,KAAa,SAA4B;CACpF,MAAM,UAAU,cAAc,MAAM,KAAK,QAAQ;AACjD,KAAI,CAAC,QACH,QAAO,EAAE;CAEX,MAAM,QAAkB,EAAE;CAC1B,MAAM,cAAc,KAAK,SAAS,eAAe;AAEjD,KAAI,WAAW,YAAY,EAAE;EAC3B,MAAM,MAAM,KAAK,MAAM,aAAa,aAAa,QAAQ,CAAC;AAG1D,MAAI,IAAI,KACN,OAAM,KAAK,SAAS,IAAI,KAAK,CAAC;AAChC,MAAI,IAAI,UAAU,IA
AI,WAAW,IAAI,KACnC,OAAM,KAAK,SAAS,IAAI,OAAO,CAAC;;CAIpC,MAAM,UAAU,YAAY,QAAQ,CAAC,QAAO,MAC1C,gBAAgB,KAAK,EAAE,IAAI,mBAAmB,KAAK,EAAE,CACtD;AACD,OAAM,KAAK,GAAG,QAAQ;AAEtB,QAAO,CAAC,GAAG,IAAI,IAAI,MAAM,CAAC;;AAc5B,SAAgB,iBAAiB,MAAc,KAAa,SAAkC;CAC5F,MAAM,UAAU,cAAc,MAAM,KAAK,QAAQ;AACjD,KAAI,CAAC,QACH,QAAO,EAAE;CAEX,MAAM,aAAa,KAAK,SAAS,SAAS;AAC1C,KAAI,CAAC,WAAW,WAAW,CACzB,QAAO,EAAE;AAEX,QAAO,YAAY,YAAY,EAAE,eAAe,MAAM,CAAC,CACpD,QAAO,MAAK,EAAE,aAAa,KAAK,WAAW,KAAK,YAAY,EAAE,MAAM,WAAW,CAAC,IAAI,WAAW,KAAK,YAAY,EAAE,MAAM,YAAY,CAAC,EAAE,CACvI,KAAI,OAAM;EAAE,WAAW,EAAE;EAAM,UAAU,KAAK,YAAY,EAAE,KAAA;EAAO,EAAE;;AAS1E,SAAgB,aAAa,UAAkB,MAAc,SAAuB;CAClF,MAAM,WAAW,YAAY,MAAM,QAAQ;CAC3C,MAAM,gBAAgB,KAAK,UAAU,UAAU;CAC/C,MAAM,WAAW,KAAK,eAAe,WAAW;CAChD,MAAM,aAAa,KAAK,UAAU,WAAW;AAE7C,WAAU,eAAe,EAAE,WAAW,MAAM,CAAC;AAE7C,KAAI,WAAW,WAAW,CACxB,aAAY,YAAY,SAAS;;AAUrC,SAAgB,cAAc,MAAc,SAAiB,UAA0D;CAErH,MAAM,cAAc,KADH,YAAY,MAAM,QAAQ,EACR,WAAW;AAC9C,WAAU,aAAa;EAAE,WAAW;EAAM,MAAM;EAAO,CAAC;AACxD,MAAK,MAAM,EAAE,MAAM,aAAa,SAC9B,eAAc,KAAK,aAAa,KAAK,EAAE,SAAS,EAAE,MAAM,KAAO,CAAC;;AAOpE,SAAgB,kBAAkB,MAAc,SAAiB,MAA6B;CAC5F,MAAM,OAAO,KAAK,YAAY,MAAM,QAAQ,EAAE,YAAY,KAAK;AAC/D,KAAI,CAAC,WAAW,KAAK,CACnB,QAAO;AACT,QAAO,aAAa,MAAM,QAAQ;;AASpC,SAAgB,aAAa,UAAkB,MAAc,SAAuB;CAClF,MAAM,WAAW,YAAY,MAAM,QAAQ;CAC3C,MAAM,gBAAgB,KAAK,UAAU,UAAU;CAC/C,MAAM,mBAAmB,KAAK,eAAe,WAAW;CACxD,MAAM,qBAAqB,KAAK,UAAU,WAAW;AAErD,WAAU,eAAe,EAAE,WAAW,MAAM,CAAC;AAE7C,KAAI,WAAW,iBAAiB,CAC9B,YAAW,iBAAiB;AAE9B,KAAI,WAAW,mBAAmB,CAChC,aAAY,oBAAoB,kBAAkB,WAAW;;AAOjE,SAAgB,iBAAiB,SAAiB,WAAmB,WAAyB;CAC5F,MAAM,WAAW,KAAK,SAAS,UAAU;AACzC,KAAI,WAAW,SAAS,CAEtB,KADa,UAAU,SAAS,CACvB,gBAAgB,CACvB,YAAW,SAAS;KACjB,QAAO,UAAU;EAAE,WAAW;EAAM,OAAO;EAAM,CAAC;AAEzD,aAAY,WAAW,SAAS;;AAGlC,SAAgB,eAAe,MAAc,KAAa,SAA2B;CACnF,MAAM,UAAU,cAAc,MAAM,KAAK,QAAQ;AACjD,KAAI,CAAC,QACH,QAAO;AAGT,MAAK,MAAM,aADY;EAAC;EAAQ;EAAiB;EAAM,CAGrD,KAAI,WADa,KAAK,SAAS,UAAU,CACjB,CACtB,QAAO;AAEX,QAAO;;AAMT,SAAgB,aAA8B;AAC5C,KAAI,CAAC,WAAW,eAAe,CAC7B,QAAO,EAAE;AAEX,QAAO,YAAY,eAAe,CAC/B,QAAO,SAAQ,KAAK
,SAAS,IAAI,CAAC,CAClC,KAAK,QAAQ;EACZ,MAAM,CAAC,MAAM,WAAW,IAAI,MAAM,IAAI;AACtC,SAAO;GAAQ;GAAgB;GAAU,KAAK,KAAK,gBAAgB,IAAA;GAAM;GACzE;;AAMN,SAAgB,eAAe,MAAc,SAA8B;CACzE,MAAM,WAAW,YAAY,MAAM,QAAQ;AAC3C,KAAI,CAAC,WAAW,SAAS,CACvB,QAAO,EAAE;CAEX,MAAM,OAAoB,EAAE;CAE5B,SAAS,KAAK,KAAa,SAAS,IAAI;AACtC,OAAK,MAAM,SAAS,YAAY,KAAK,EAAE,eAAe,MAAM,CAAC,EAAE;GAC7D,MAAM,YAAY,KAAK,KAAK,MAAM,KAAK;GACvC,MAAM,eAAe,SAAS,GAAG,OAAO,GAAG,MAAM,SAAS,MAAM;AAEhE,OAAI,MAAM,aAAa,CACrB,MAAK,WAAW,aAAa;YAEtB,MAAM,KAAK,SAAS,MAAM,IAAI,MAAM,KAAK,SAAS,OAAO,CAChE,MAAK,KAAK;IACR,MAAM;IACN,SAAS,aAAa,WAAW,QAAA;IAClC,CAAC;;;AAKR,MAAK,SAAS;AACd,QAAO;;AAMT,SAAgB,WAAW,MAAc,SAA0B;CACjE,MAAM,WAAW,YAAY,MAAM,QAAQ;AAC3C,KAAI,CAAC,WAAW,SAAS,CACvB,QAAO;AAET,QAAO,UAAU,EAAE,WAAW,MAAM,CAAC;AACrC,QAAO;;AAMT,SAAgB,gBAAwB;CACtC,MAAM,WAAW,YAAY;AAC7B,MAAK,MAAM,OAAO,SAChB,YAAW,IAAI,MAAM,IAAI,QAAQ;AAEnC,QAAO,SAAS;;AAOlB,SAAgB,mBAAmB,UAAkB,WAAW,GAAa;CAC3E,MAAM,gBAAgB,KAAK,UAAU,UAAU;AAC/C,KAAI,CAAC,WAAW,cAAc,CAC5B,QAAO,EAAE;CAEX,MAAM,QAAkB,EAAE;CAE1B,SAAS,KAAK,KAAa,OAAe;AACxC,MAAI,QAAQ,SACV;AACF,MAAI;AACF,QAAK,MAAM,SAAS,YAAY,KAAK,EAAE,eAAe,MAAM,CAAC,EAAE;IAC7D,MAAM,OAAO,KAAK,KAAK,MAAM,KAAK;AAClC,QAAI,MAAM,aAAa,IAAI,MAAM,gBAAgB,CAC/C,KAAI;AAEF,SADa,SAAS,KAAK,CAClB,aAAa,EAAE;AACtB,WAAK,MAAM,QAAQ,EAAE;AACrB;;YAGE;AAAE;;AAEV,QAAI,MAAM,KAAK,SAAS,MAAM,CAC5B,OAAM,KAAK,KAAK;;UAIhB;;AAKR,MAAK,eAAe,EAAE;AACtB,QAAO"}