skilld 1.4.0 → 1.5.0

This diff shows the published contents of two package versions as they appear in their respective public registries. It is provided for informational purposes only.
Files changed (44)
  1. package/README.md +54 -4
  2. package/dist/_chunks/agent.mjs +2 -1
  3. package/dist/_chunks/agent.mjs.map +1 -1
  4. package/dist/_chunks/assemble.mjs +1 -0
  5. package/dist/_chunks/assemble.mjs.map +1 -1
  6. package/dist/_chunks/author.mjs +478 -0
  7. package/dist/_chunks/author.mjs.map +1 -0
  8. package/dist/_chunks/cli-helpers.mjs +133 -2
  9. package/dist/_chunks/cli-helpers.mjs.map +1 -1
  10. package/dist/_chunks/detect.mjs.map +1 -1
  11. package/dist/_chunks/index2.d.mts +2 -0
  12. package/dist/_chunks/index2.d.mts.map +1 -1
  13. package/dist/_chunks/install.mjs +7 -17
  14. package/dist/_chunks/install.mjs.map +1 -1
  15. package/dist/_chunks/list.mjs +2 -1
  16. package/dist/_chunks/list.mjs.map +1 -1
  17. package/dist/_chunks/lockfile.mjs +140 -0
  18. package/dist/_chunks/lockfile.mjs.map +1 -0
  19. package/dist/_chunks/prepare.mjs +94 -0
  20. package/dist/_chunks/prepare.mjs.map +1 -0
  21. package/dist/_chunks/prompts.mjs +32 -43
  22. package/dist/_chunks/prompts.mjs.map +1 -1
  23. package/dist/_chunks/sanitize.mjs.map +1 -1
  24. package/dist/_chunks/search-interactive.mjs +1 -0
  25. package/dist/_chunks/search-interactive.mjs.map +1 -1
  26. package/dist/_chunks/search.mjs +146 -9
  27. package/dist/_chunks/search.mjs.map +1 -1
  28. package/dist/_chunks/setup.mjs +1 -1
  29. package/dist/_chunks/skills.mjs +28 -142
  30. package/dist/_chunks/skills.mjs.map +1 -1
  31. package/dist/_chunks/sources.mjs +4 -2
  32. package/dist/_chunks/sources.mjs.map +1 -1
  33. package/dist/_chunks/sync-shared.mjs +14 -0
  34. package/dist/_chunks/sync-shared2.mjs +1054 -0
  35. package/dist/_chunks/sync-shared2.mjs.map +1 -0
  36. package/dist/_chunks/sync.mjs +72 -1065
  37. package/dist/_chunks/sync.mjs.map +1 -1
  38. package/dist/_chunks/uninstall.mjs +5 -3
  39. package/dist/_chunks/uninstall.mjs.map +1 -1
  40. package/dist/agent/index.d.mts +4 -2
  41. package/dist/agent/index.d.mts.map +1 -1
  42. package/dist/cli.mjs +76 -10
  43. package/dist/cli.mjs.map +1 -1
  44. package/package.json +7 -6
package/dist/_chunks/assemble.mjs.map
@@ -1 +1 @@
- {"version":3,"file":"assemble.mjs","names":[],"sources":["../../src/commands/assemble.ts"],"sourcesContent":[…],"mappings":";;;;;;;;;;;;;;;;AAqBA,…"}
+ {"version":3,"file":"assemble.mjs","names":[],"sources":["../../src/commands/assemble.ts"],"sourcesContent":[…],"mappings":";;;;;;;;;;;;;;;;;AAqBA,…"}
Both maps are a single line; the sourcesContent embedding src/commands/assemble.ts is unchanged, and the mappings string gains one leading ";" (one extra generated line at the top of assemble.mjs, matching the +1 -0 change to assemble.mjs in the file list).
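In the Source Map v3 "mappings" format, ";" separates generated lines (a run of leading semicolons means those first lines carry no mappings) and each comma-separated segment is a run of Base64 VLQ fields, so the one-character delta above is readable by hand. A minimal standalone decoder sketch (illustrative, not part of the package):

  // Decode one Base64 VLQ segment from a sourcemap "mappings" string.
  // "AAqBA" is the first segment of both maps above.
  const B64 = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";
  function decodeVlq(segment) {
    const fields = [];
    let value = 0, shift = 0;
    for (const ch of segment) {
      const chunk = B64.indexOf(ch);
      value += (chunk & 31) << shift;
      if (chunk & 32) { shift += 5; continue; } // continuation bit: more chunks follow
      fields.push(value & 1 ? -(value >>> 1) : value >>> 1); // low bit carries the sign
      value = 0;
      shift = 0;
    }
    return fields;
  }
  console.log(decodeVlq("AAqBA")); // [0, 0, 21, 0]: generated column +0 maps to
  // sources[0] (src/commands/assemble.ts), source line +21, source column +0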
package/dist/_chunks/author.mjs
@@ -0,0 +1,478 @@
+ import { a as getModelLabel } from "./agent.mjs";
+ import { o as getCacheDir } from "./config.mjs";
+ import { n as sanitizeMarkdown } from "./sanitize.mjs";
+ import { r as ensureCacheDir, y as writeToCache } from "./cache.mjs";
+ import "./yaml.mjs";
+ import "./markdown.mjs";
+ import "./shared.mjs";
+ import { B as isGhAvailable, F as formatDiscussionAsMarkdown, I as generateDiscussionIndex, L as fetchGitHubIssues, P as fetchGitHubDiscussions, R as formatIssueAsMarkdown, c as readLocalPackageInfo, tt as parseGitHubUrl, z as generateIssueIndex } from "./sources.mjs";
+ import "./detect.mjs";
+ import { n as computeSkillDirName, t as generateSkillMd } from "./prompts.mjs";
+ import { E as readConfig, S as defaultFeatures, b as appendToJsonArray, s as guard, x as patchPackageJson } from "./cli-helpers.mjs";
+ import "./lockfile.mjs";
+ import { l as timedSpinner } from "./formatting.mjs";
+ import { S as writePromptFiles, _ as linkAllReferences, b as selectLlmConfig, c as ejectReferences, l as enhanceSkillWithLLM, m as forceClearCache, o as detectChangelog } from "./sync-shared2.mjs";
+ import { join, relative, resolve } from "pathe";
+ import { existsSync, mkdirSync, readFileSync, readdirSync, rmSync, writeFileSync } from "node:fs";
+ import * as p from "@clack/prompts";
+ import { defineCommand } from "citty";
+ //#region src/commands/author.ts
+ const QUOTE_PREFIX_RE = /^['"]/;
+ const QUOTE_SUFFIX_RE = /['"]$/;
+ function detectMonorepoPackages(cwd) {
+   const pkgPath = join(cwd, "package.json");
+   if (!existsSync(pkgPath)) return null;
+   const pkg = JSON.parse(readFileSync(pkgPath, "utf-8"));
+   if (!pkg.private) return null;
+   let patterns = [];
+   if (Array.isArray(pkg.workspaces)) patterns = pkg.workspaces;
+   else if (pkg.workspaces?.packages) patterns = pkg.workspaces.packages;
+   if (patterns.length === 0) {
+     const pnpmWs = join(cwd, "pnpm-workspace.yaml");
+     if (existsSync(pnpmWs)) {
+       const lines = readFileSync(pnpmWs, "utf-8").split("\n");
+       for (const line of lines) {
+         const trimmed = line.trim();
+         if (!trimmed.startsWith("-")) continue;
+         const value = trimmed.slice(1).trim().replace(QUOTE_PREFIX_RE, "").replace(QUOTE_SUFFIX_RE, "");
+         if (value) patterns.push(value);
+       }
+     }
+   }
+   if (patterns.length === 0) return null;
+   const packages = [];
+   for (const pattern of patterns) {
+     const scanDir = resolve(cwd, pattern.replace(/\/?\*+$/, ""));
+     if (!existsSync(scanDir)) continue;
+     for (const entry of readdirSync(scanDir, { withFileTypes: true })) {
+       if (!entry.isDirectory()) continue;
+       const pkgJsonPath = join(scanDir, entry.name, "package.json");
+       if (!existsSync(pkgJsonPath)) continue;
+       const childPkg = JSON.parse(readFileSync(pkgJsonPath, "utf-8"));
+       if (childPkg.private) continue;
+       if (!childPkg.name) continue;
+       const repoUrl = typeof childPkg.repository === "string" ? childPkg.repository : childPkg.repository?.url?.replace(/^git\+/, "").replace(/\.git$/, "");
+       packages.push({
+         name: childPkg.name,
+         version: childPkg.version || "0.0.0",
+         description: childPkg.description,
+         repoUrl,
+         dir: join(scanDir, entry.name)
+       });
+     }
+   }
+   return packages.length > 0 ? packages : null;
+ }
+ function walkMarkdownFiles(dir, base = "") {
+   const results = [];
+   if (!existsSync(dir)) return results;
+   for (const entry of readdirSync(dir, { withFileTypes: true })) {
+     const rel = base ? `${base}/${entry.name}` : entry.name;
+     const full = join(dir, entry.name);
+     if (entry.isDirectory()) results.push(...walkMarkdownFiles(full, rel));
+     else if (/\.mdx?$/.test(entry.name)) results.push({
+       path: rel,
+       content: readFileSync(full, "utf-8")
+     });
+   }
+   return results;
+ }
+ /**
+  * Resolve docs from local filesystem. Cascade:
+  * 1. Package-level docs/ directory
+  * 2. Monorepo-root docs/ directory (if monorepoRoot provided)
+  * 3. Monorepo-root docs/content/ (Nuxt Content convention)
+  * 4. llms.txt in package dir
+  * 5. README.md in package dir
+  */
+ function resolveLocalDocs(packageDir, packageName, version, monorepoRoot) {
+   const cachedDocs = [];
+   const cacheChangelog = () => cacheLocalChangelog(packageDir, packageName, version, monorepoRoot);
+   const docsDir = join(packageDir, "docs");
+   if (existsSync(docsDir)) {
+     const mdFiles = walkMarkdownFiles(docsDir);
+     if (mdFiles.length > 0) {
+       for (const f of mdFiles) cachedDocs.push({
+         path: `docs/${f.path}`,
+         content: sanitizeMarkdown(f.content)
+       });
+       writeToCache(packageName, version, cachedDocs);
+       cacheChangelog();
+       return {
+         docsType: "docs",
+         docSource: `local docs/ (${mdFiles.length} files)`
+       };
+     }
+   }
+   if (monorepoRoot) for (const candidate of ["docs/content", "docs"]) {
+     const rootDocsDir = join(monorepoRoot, candidate);
+     if (existsSync(rootDocsDir)) {
+       const mdFiles = walkMarkdownFiles(rootDocsDir);
+       if (mdFiles.length > 0) {
+         for (const f of mdFiles) cachedDocs.push({
+           path: `docs/${f.path}`,
+           content: sanitizeMarkdown(f.content)
+         });
+         writeToCache(packageName, version, cachedDocs);
+         cacheChangelog();
+         return {
+           docsType: "docs",
+           docSource: `monorepo ${candidate}/ (${mdFiles.length} files)`
+         };
+       }
+     }
+   }
+   for (const dir of [packageDir, monorepoRoot].filter(Boolean)) {
+     const llmsPath = join(dir, "llms.txt");
+     if (existsSync(llmsPath)) {
+       cachedDocs.push({
+         path: "llms.txt",
+         content: sanitizeMarkdown(readFileSync(llmsPath, "utf-8"))
+       });
+       writeToCache(packageName, version, cachedDocs);
+       cacheChangelog();
+       return {
+         docsType: "llms.txt",
+         docSource: dir === packageDir ? "local llms.txt" : "monorepo llms.txt"
+       };
+     }
+   }
+   for (const dir of [packageDir, monorepoRoot].filter(Boolean)) {
+     const readmeFile = readdirSync(dir).find((f) => /^readme\.md$/i.test(f));
+     if (readmeFile) {
+       cachedDocs.push({
+         path: "docs/README.md",
+         content: sanitizeMarkdown(readFileSync(join(dir, readmeFile), "utf-8"))
+       });
+       writeToCache(packageName, version, cachedDocs);
+       cacheChangelog();
+       return {
+         docsType: "readme",
+         docSource: dir === packageDir ? "local README.md" : "monorepo README.md"
+       };
+     }
+   }
+   cacheChangelog();
+   return {
+     docsType: "readme",
+     docSource: "none"
+   };
+ }
+ function cacheLocalChangelog(dir, packageName, version, monorepoRoot) {
+   const candidates = ["CHANGELOG.md", "changelog.md"];
+   const changelogFile = candidates.find((f) => existsSync(join(dir, f))) || (monorepoRoot ? candidates.find((f) => existsSync(join(monorepoRoot, f))) : void 0);
+   const changelogDir = changelogFile && existsSync(join(dir, changelogFile)) ? dir : monorepoRoot;
+   if (changelogFile && changelogDir) writeToCache(packageName, version, [{
+     path: `releases/${changelogFile}`,
+     content: sanitizeMarkdown(readFileSync(join(changelogDir, changelogFile), "utf-8"))
+   }]);
+ }
+ async function fetchRemoteSupplements(opts) {
+   const { packageName, version, repoUrl, features, onProgress } = opts;
+   if (!repoUrl || !isGhAvailable()) return {
+     hasIssues: false,
+     hasDiscussions: false
+   };
+   const gh = parseGitHubUrl(repoUrl);
+   if (!gh) return {
+     hasIssues: false,
+     hasDiscussions: false
+   };
+   const cacheDir = getCacheDir(packageName, version);
+   let hasIssues = false;
+   const issuesDir = join(cacheDir, "issues");
+   if (features.issues && !existsSync(issuesDir)) {
+     onProgress("Fetching issues via GitHub API");
+     const issues = await fetchGitHubIssues(gh.owner, gh.repo, 30).catch(() => []);
+     if (issues.length > 0) {
+       onProgress(`Caching ${issues.length} issues`);
+       writeToCache(packageName, version, issues.map((issue) => ({
+         path: `issues/issue-${issue.number}.md`,
+         content: formatIssueAsMarkdown(issue)
+       })));
+       writeToCache(packageName, version, [{
+         path: "issues/_INDEX.md",
+         content: generateIssueIndex(issues)
+       }]);
+       hasIssues = true;
+     }
+   } else hasIssues = features.issues && existsSync(issuesDir);
+   let hasDiscussions = false;
+   const discussionsDir = join(cacheDir, "discussions");
+   if (features.discussions && !existsSync(discussionsDir)) {
+     onProgress("Fetching discussions via GitHub API");
+     const discussions = await fetchGitHubDiscussions(gh.owner, gh.repo, 20).catch(() => []);
+     if (discussions.length > 0) {
+       onProgress(`Caching ${discussions.length} discussions`);
+       writeToCache(packageName, version, discussions.map((d) => ({
+         path: `discussions/discussion-${d.number}.md`,
+         content: formatDiscussionAsMarkdown(d)
+       })));
+       writeToCache(packageName, version, [{
+         path: "discussions/_INDEX.md",
+         content: generateDiscussionIndex(discussions)
+       }]);
+       hasDiscussions = true;
+     }
+   } else hasDiscussions = features.discussions && existsSync(discussionsDir);
+   return {
+     hasIssues,
+     hasDiscussions
+   };
+ }
+ function patchPackageJsonFiles(packageDir) {
+   const pkgPath = join(packageDir, "package.json");
+   if (!existsSync(pkgPath)) return;
+   if (patchPackageJson(pkgPath, (raw, pkg) => {
+     if (!Array.isArray(pkg.files)) {
+       p.log.warn("No `files` array in package.json. Add `\"skills\"` to your files array manually.");
+       return null;
+     }
+     if (pkg.files.some((f) => f === "skills" || f === "skills/" || f === "skills/**")) return null;
+     return appendToJsonArray(raw, ["files"], "skills");
+   })) p.log.success("Added `\"skills\"` to package.json files array");
+ }
+ async function authorSinglePackage(opts) {
+   const { packageDir, packageName, version } = opts;
+   const spin = timedSpinner();
+   const sanitizedName = computeSkillDirName(packageName);
+   const outDir = opts.out ? resolve(packageDir, opts.out) : join(packageDir, "skills", sanitizedName);
+   if (opts.out) {
+     const rel = relative(packageDir, outDir);
+     if (!rel || rel === "." || rel.startsWith("..")) {
+       p.log.error("--out must point to a child directory, not the package root or a parent");
+       return null;
+     }
+   }
+   if (existsSync(outDir)) rmSync(outDir, {
+     recursive: true,
+     force: true
+   });
+   mkdirSync(outDir, { recursive: true });
+   if (opts.force) forceClearCache(packageName, version);
+   ensureCacheDir();
+   const features = readConfig().features ?? defaultFeatures;
+   spin.start("Resolving local docs");
+   const { docsType, docSource } = resolveLocalDocs(packageDir, packageName, version, opts.monorepoRoot);
+   spin.stop(`Resolved docs: ${docSource}`);
+   const supSpin = timedSpinner();
+   supSpin.start("Checking remote supplements");
+   const { hasIssues, hasDiscussions } = await fetchRemoteSupplements({
+     packageName,
+     version,
+     repoUrl: opts.repoUrl,
+     features,
+     onProgress: (msg) => supSpin.message(msg)
+   });
+   const supParts = [];
+   if (hasIssues) supParts.push("issues");
+   if (hasDiscussions) supParts.push("discussions");
+   supSpin.stop(supParts.length > 0 ? `Fetched ${supParts.join(", ")}` : "No remote supplements");
+   linkAllReferences(outDir, packageName, packageDir, version, docsType, void 0, features);
+   const cacheDir = getCacheDir(packageName, version);
+   const hasChangelog = detectChangelog(packageDir, cacheDir);
+   const hasReleases = existsSync(join(cacheDir, "releases"));
+   const baseSkillMd = generateSkillMd({
+     name: packageName,
+     version,
+     description: opts.description,
+     relatedSkills: [],
+     hasIssues,
+     hasDiscussions,
+     hasReleases,
+     hasChangelog,
+     docsType,
+     hasShippedDocs: false,
+     pkgFiles: [],
+     dirName: sanitizedName,
+     repoUrl: opts.repoUrl,
+     features,
+     eject: true
+   });
+   writeFileSync(join(outDir, "SKILL.md"), baseSkillMd);
+   p.log.success(`Created base skill: ${relative(packageDir, outDir)}`);
+   const skilldDir = join(outDir, ".skilld");
+   try {
+     const llmConfig = opts.llmConfig;
+     if (llmConfig?.promptOnly) writePromptFiles({
+       packageName,
+       skillDir: outDir,
+       version,
+       hasIssues,
+       hasDiscussions,
+       hasReleases,
+       hasChangelog,
+       docsType,
+       hasShippedDocs: false,
+       pkgFiles: [],
+       sections: llmConfig.sections,
+       customPrompt: llmConfig.customPrompt,
+       features
+     });
+     else if (llmConfig) {
+       p.log.step(getModelLabel(llmConfig.model));
+       await enhanceSkillWithLLM({
+         packageName,
+         version,
+         skillDir: outDir,
+         dirName: sanitizedName,
+         model: llmConfig.model,
+         resolved: { repoUrl: opts.repoUrl },
+         relatedSkills: [],
+         hasIssues,
+         hasDiscussions,
+         hasReleases,
+         hasChangelog,
+         docsType,
+         hasShippedDocs: false,
+         pkgFiles: [],
+         force: opts.force,
+         debug: opts.debug,
+         sections: llmConfig.sections,
+         customPrompt: llmConfig.customPrompt,
+         features,
+         eject: true
+       });
+     }
+     ejectReferences(outDir, packageName, packageDir, version, docsType, features);
+   } finally {
+     if (existsSync(skilldDir)) rmSync(skilldDir, {
+       recursive: true,
+       force: true
+     });
+   }
+   const relOut = relative(packageDir, outDir);
+   if (relOut === "skills" || relOut.startsWith("skills/")) patchPackageJsonFiles(packageDir);
+   else if (opts.out) p.log.info("Output is outside skills/, skipping package.json patch. Add the path to \"files\" manually if publishing.");
+   return outDir;
+ }
+ async function resolveLlmConfig(model, yes) {
+   if (readConfig().skipLlm || yes && !model) return void 0;
+   return selectLlmConfig(model);
+ }
+ async function authorCommand(opts) {
+   const cwd = process.cwd();
+   const monoPackages = detectMonorepoPackages(cwd);
+   if (monoPackages && monoPackages.length > 0) {
+     p.intro(`\x1B[1m\x1B[35mskilld\x1B[0m author \x1B[90m(monorepo: ${monoPackages.length} packages)\x1B[0m`);
+     if (opts.out) {
+       p.log.error("--out is not supported in monorepo mode (each package gets its own skills/ directory)");
+       return;
+     }
+     const selected = guard(await p.multiselect({
+       message: "Which packages should ship skills?",
+       options: monoPackages.map((pkg) => ({
+         label: pkg.name,
+         value: pkg,
+         hint: pkg.description
+       }))
+     }));
+     if (selected.length === 0) return;
+     const llmConfig = await resolveLlmConfig(opts.model, opts.yes);
+     const rootPkgPath = join(cwd, "package.json");
+     const rootPkg = JSON.parse(readFileSync(rootPkgPath, "utf-8"));
+     const rootRepoUrl = typeof rootPkg.repository === "string" ? rootPkg.repository : rootPkg.repository?.url?.replace(/^git\+/, "").replace(/\.git$/, "");
+     const results = [];
+     for (const pkg of selected) {
+       p.log.step(`\x1B[36m${pkg.name}\x1B[0m@${pkg.version}`);
+       const outDir = await authorSinglePackage({
+         packageDir: pkg.dir,
+         packageName: pkg.name,
+         version: pkg.version,
+         description: pkg.description,
+         repoUrl: pkg.repoUrl || rootRepoUrl,
+         monorepoRoot: cwd,
+         llmConfig,
+         force: opts.force,
+         debug: opts.debug
+       });
+       if (outDir) results.push({
+         name: pkg.name,
+         outDir
+       });
+     }
+     if (results.length > 0) {
+       p.log.message("");
+       for (const { name, outDir } of results) p.log.success(`${name} → ${relative(cwd, outDir)}`);
+       printConsumerGuidance(results.map((r) => r.name));
+     }
+     p.outro("Done");
+     return;
+   }
+   const pkgInfo = readLocalPackageInfo(cwd);
+   if (!pkgInfo) {
+     p.log.error("No package.json found in current directory");
+     return;
+   }
+   const { name: packageName, version, repoUrl } = pkgInfo;
+   p.intro(`\x1B[1m\x1B[35mskilld\x1B[0m author \x1B[36m${packageName}\x1B[0m@${version}`);
+   const llmConfig = await resolveLlmConfig(opts.model, opts.yes);
+   const outDir = await authorSinglePackage({
+     packageDir: cwd,
+     packageName,
+     version,
+     description: pkgInfo.description,
+     repoUrl,
+     out: opts.out,
+     llmConfig,
+     force: opts.force,
+     debug: opts.debug
+   });
+   if (outDir) {
+     printConsumerGuidance([packageName]);
+     p.outro(`Authored skill to ${relative(cwd, outDir)}`);
+   }
+ }
+ function printConsumerGuidance(packageNames) {
+   const names = packageNames.join(", ");
+   p.log.info(`\x1B[90mConsumers get ${packageNames.length > 1 ? "these skills" : "this skill"} automatically:\x1B[0m\n \x1B[90m1. Install ${names} as a dependency\x1B[0m\n \x1B[90m2. Run \x1B[36mskilld prepare\x1B[90m (or add to package.json: \x1B[36m"prepare": "skilld prepare"\x1B[90m)\x1B[0m`);
+ }
+ const authorCommandDef = defineCommand({
+   meta: {
+     name: "author",
+     description: "Generate portable skill for npm publishing"
+   },
+   args: {
+     out: {
+       type: "string",
+       alias: "o",
+       description: "Output directory (default: ./skills/<name>/)"
+     },
+     model: {
+       type: "string",
+       alias: "m",
+       description: "Enhancement model for SKILL.md generation",
+       valueHint: "id"
+     },
+     yes: {
+       type: "boolean",
+       alias: "y",
+       description: "Skip prompts, use defaults",
+       default: false
+     },
+     force: {
+       type: "boolean",
+       alias: "f",
+       description: "Clear cache and regenerate",
+       default: false
+     },
+     debug: {
+       type: "boolean",
+       description: "Save raw enhancement output to logs/",
+       default: false
+     }
+   },
+   async run({ args }) {
+     await authorCommand({
+       out: args.out,
+       model: args.model,
+       yes: args.yes,
+       force: args.force,
+       debug: args.debug
+     });
+   }
+ });
+ //#endregion
+ export { authorCommandDef };
+
+ //# sourceMappingURL=author.mjs.map
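The docstring on resolveLocalDocs in the chunk above defines a fixed lookup precedence. A minimal standalone sketch of just that ordering, without the caching side effects or the non-empty-directory checks (illustrative, not shipped code):

  import { existsSync } from "node:fs";
  import { join } from "node:path";

  // Mirrors the precedence of resolveLocalDocs(): package docs/, then
  // monorepo docs/content/ and docs/, then llms.txt, then README fallback.
  function pickDocSource(packageDir, monorepoRoot) {
    if (existsSync(join(packageDir, "docs"))) return "local docs/";
    for (const candidate of ["docs/content", "docs"]) {
      if (monorepoRoot && existsSync(join(monorepoRoot, candidate)))
        return `monorepo ${candidate}/`;
    }
    for (const dir of [packageDir, monorepoRoot].filter(Boolean)) {
      if (existsSync(join(dir, "llms.txt")))
        return dir === packageDir ? "local llms.txt" : "monorepo llms.txt";
    }
    return "README.md (or none)";
  }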
package/dist/_chunks/author.mjs.map
@@ -0,0 +1 @@
+ {"version":3,"file":"author.mjs","names":[],"sources":["../../src/commands/author.ts"],"sourcesContent":[…],"mappings":"…"}
The new map is a single line whose sourcesContent embeds the TypeScript source of src/commands/author.ts (the same logic as the compiled author.mjs chunk above, plus its type annotations and the exported MonorepoPackage interface); the mappings string is truncated in this capture.
,MAAM,OAAO,CAAC,YAAY,aAAa,CAAC,OAAO,QAAQ,EAAc;EACxE,MAAM,WAAW,KAAK,KAAK,WAAW;AACtC,MAAI,WAAW,SAAS,EAAE;AACxB,cAAW,KAAK;IAAE,MAAM;IAAY,SAAS,iBAAiB,aAAa,UAAU,QAAQ,CAAA;IAAG,CAAC;AACjG,gBAAa,aAAa,SAAS,WAAW;AAC9C,mBAAgB;AAEhB,UAAO;IAAE,UAAU;IAAY,WADhB,QAAQ,aAAa,mBAAmB;IACL;;;AAKtD,MAAK,MAAM,OAAO,CAAC,YAAY,aAAa,CAAC,OAAO,QAAQ,EAAc;EACxE,MAAM,aAAa,YAAY,IAAI,CAAC,MAAK,MAAK,gBAAgB,KAAK,EAAE,CAAC;AACtE,MAAI,YAAY;AACd,cAAW,KAAK;IAAE,MAAM;IAAkB,SAAS,iBAAiB,aAAa,KAAK,KAAK,WAAW,EAAE,QAAQ,CAAA;IAAG,CAAC;AACpH,gBAAa,aAAa,SAAS,WAAW;AAC9C,mBAAgB;AAEhB,UAAO;IAAE,UAAU;IAAU,WADd,QAAQ,aAAa,oBAAoB;IACR;;;AAIpD,iBAAgB;AAChB,QAAO;EAAE,UAAU;EAAU,WAAW;EAAQ;;AAGlD,SAAS,oBAAoB,KAAa,aAAqB,SAAiB,cAA6B;CAC3G,MAAM,aAAa,CAAC,gBAAgB,eAAe;CACnD,MAAM,gBAAgB,WAAW,MAAK,MAAK,WAAW,KAAK,KAAK,EAAE,CAAC,CAAC,KAC9D,eAAe,WAAW,MAAK,MAAK,WAAW,KAAK,cAAc,EAAE,CAAC,CAAC,GAAG,KAAA;CAC/E,MAAM,eAAe,iBAAiB,WAAW,KAAK,KAAK,cAAc,CAAC,GAAG,MAAM;AACnF,KAAI,iBAAiB,aACnB,cAAa,aAAa,SAAS,CAAC;EAClC,MAAM,YAAY;EAClB,SAAS,iBAAiB,aAAa,KAAK,cAAc,cAAc,EAAE,QAAQ,CAAA;EACnF,CAAC,CAAC;;AAMP,eAAe,uBAAuB,MAMuB;CAC3D,MAAM,EAAE,aAAa,SAAS,SAAS,UAAU,eAAe;AAEhE,KAAI,CAAC,WAAW,CAAC,eAAe,CAC9B,QAAO;EAAE,WAAW;EAAO,gBAAgB;EAAO;CAEpD,MAAM,KAAK,eAAe,QAAQ;AAClC,KAAI,CAAC,GACH,QAAO;EAAE,WAAW;EAAO,gBAAgB;EAAO;CAEpD,MAAM,WAAW,YAAY,aAAa,QAAQ;CAElD,IAAI,YAAY;CAChB,MAAM,YAAY,KAAK,UAAU,SAAS;AAC1C,KAAI,SAAS,UAAU,CAAC,WAAW,UAAU,EAAE;AAC7C,aAAW,iCAAiC;EAC5C,MAAM,SAAS,MAAM,kBAAkB,GAAG,OAAO,GAAG,MAAM,GAAG,CAAC,YAAY,EAAE,CAAC;AAC7E,MAAI,OAAO,SAAS,GAAG;AACrB,cAAW,WAAW,OAAO,OAAO,SAAS;AAC7C,gBAAa,aAAa,SAAS,OAAO,KAAI,WAAU;IACtD,MAAM,gBAAgB,MAAM,OAAO;IACnC,SAAS,sBAAsB,MAAA;IAChC,EAAE,CAAC;AACJ,gBAAa,aAAa,SAAS,CAAC;IAClC,MAAM;IACN,SAAS,mBAAmB,OAAA;IAC7B,CAAC,CAAC;AACH,eAAY;;OAId,aAAY,SAAS,UAAU,WAAW,UAAU;CAGtD,IAAI,iBAAiB;CACrB,MAAM,iBAAiB,KAAK,UAAU,cAAc;AACpD,KAAI,SAAS,eAAe,CAAC,WAAW,eAAe,EAAE;AACvD,aAAW,sCAAsC;EACjD,MAAM,cAAc,MAAM,uBAAuB,GAAG,OAAO,GAAG,MAAM,GAAG,CAAC,YAAY,EAAE,CAAC;AACvF,MAAI,YAAY,SAAS,GAAG;AAC1B,cAAW,WAAW,YAAY,OAAO,cAAc;AACvD,gBAAa,aAAa,SAAS,YAAY,KAAI,OAAM;IACvD,MAAM,0BAA0B,EAAE,OAAO;IACzC,SAAS,2BAA2B,EAAA;IACrC,EAAE,CAAC;AACJ,gBAAa,aAAa,SAAS,CAAC;IAClC,MAAM;IACN,SAAS,wBAAwB,YAAA;IAClC,CAAC,CAAC;AACH,oBAAiB;;OAInB,kBAAiB,SAAS,eAAe,WAAW,eAAe;AAGrE,QAAO;EAAE;EAAW;EAAgB;;AAKtC,SAAgB,sBAAsB,YAA0B;CAC9D,MAAM,UAAU,KAAK,YAAY,eAAe;AAChD,KAAI,CAAC,WAAW,QAAQ,CACtB;AAcF,KAZc,iBAAiB,UAAU,KAAK,QAAQ;AACpD,MAAI,CAAC,MAAM,QAAQ,IAAI,MAAM,EAAE;AAC7B,KAAE,IAAI,KAAK,mFAAiF;AAC5F,UAAO;;AAGT,MAAK,IAAI,MAAmB,MAAM,MAAc,MAAM,YAAY,MAAM,aAAa,MAAM,YAAY,CACrG,QAAO;AAET,SAAO,kBAAkB,KAAK,CAAC,QAAQ,EAAE,SAAS;GAClD,CAGA,GAAE,IAAI,QAAQ,iDAA+C;;AAKjE,eAAe,oBAAoB,MAWR;CACzB,MAAM,EAAE,YAAY,aAAa,YAAY;CAC7C,MAAM,OAAO,cAAc;CAE3B,MAAM,gBAAgB,oBAAoB,YAAY;CACtD,MAAM,SAAS,KAAK,MAAM,QAAQ,YAAY,KAAK,IAAI,GAAG,KAAK,YAAY,UAAU,cAAc;AAGnG,KAAI,KAAK,KAAK;EACZ,MAAM,MAAM,SAAS,YAAY,OAAO;AACxC,MAAI,CAAC,OAAO,QAAQ,OAAO,IAAI,WAAW,KAAK,EAAE;AAC/C,KAAE,IAAI,MAAM,0EAA0E;AACtF,UAAO;;;AAIX,KAAI,WAAW,OAAO,CACpB,QAAO,QAAQ;EAAE,WAAW;EAAM,OAAO;EAAM,CAAC;AAClD,WAAU,QAAQ,EAAE,WAAW,MAAM,CAAC;AAEtC,KAAI,KAAK,MACP,iBAAgB,aAAa,QAAQ;AAGvC,iBAAgB;CAChB,MAAM,WAAW,YAAY,CAAC,YAAY;AAG1C,MAAK,MAAM,uBAAuB;CAClC,MAAM,EAAE,UAAU,cAAc,iBAAiB,YAAY,aAAa,SAAS,KAAK,aAAa;AACrG,MAAK,KAAK,kBAAkB,YAAY;CAGxC,MAAM,UAAU,cAAc;AAC9B,SAAQ,MAAM,8BAA8B;CAC5C,MAAM,EAAE,WAAW,mBAAmB,MAAM,uBAAuB;EACjE;EACA;EACA,SAAS,KAAK;EACd;EACA,aAAY,QAAO,QAAQ,QAAQ,IAAA;EACpC,CAAC;CACF,MAAM,WAAqB,EAAE;AAC7B,KAAI,UACF,UAAS,KAAK,SAAS;AACzB,KAAI,eACF,UAAS,KAAK,cAAc;AAC9B,SAAQ,KAAK,SAAS,SAAS,IAAI,WAAW,SAAS,KAAK,KAAK,KAAK,wBAAwB;AAG9F,mBAAkB,QAAQ,aAAa,YAAY,SAAS,UAAU,KAAA,GAAW,SAAS;CAG1F,MAAM,WAAW,
YAAY,aAAa,QAAQ;CAClD,MAAM,eAAe,gBAAgB,YAAY,SAAS;CAC1D,MAAM,cAAc,WAAW,KAAK,UAAU,WAAW,CAAC;CAG1D,MAAM,cAAc,gBAAgB;EAClC,MAAM;EACN;EACA,aAAa,KAAK;EAClB,eAAe,EAAE;EACjB;EACA;EACA;EACA;EACA;EACA,gBAAgB;EAChB,UAAU,EAAE;EACZ,SAAS;EACT,SAAS,KAAK;EACd;EACA,OAAO;EACR,CAAC;AACF,eAAc,KAAK,QAAQ,WAAW,EAAE,YAAY;AACpD,GAAE,IAAI,QAAQ,uBAAuB,SAAS,YAAY,OAAO,GAAG;CAGpE,MAAM,YAAY,KAAK,QAAQ,UAAU;AACzC,KAAI;EACF,MAAM,YAAY,KAAK;AACvB,MAAI,WAAW,WACb,kBAAiB;GACf;GACA,UAAU;GACV;GACA;GACA;GACA;GACA;GACA;GACA,gBAAgB;GAChB,UAAU,EAAE;GACZ,UAAU,UAAU;GACpB,cAAc,UAAU;GACxB;GACD,CAAC;WAEK,WAAW;AAClB,KAAE,IAAI,KAAK,cAAc,UAAU,MAAM,CAAC;AAC1C,SAAM,oBAAoB;IACxB;IACA;IACA,UAAU;IACV,SAAS;IACT,OAAO,UAAU;IACjB,UAAU,EAAE,SAAS,KAAK,SAAS;IACnC,eAAe,EAAE;IACjB;IACA;IACA;IACA;IACA;IACA,gBAAgB;IAChB,UAAU,EAAE;IACZ,OAAO,KAAK;IACZ,OAAO,KAAK;IACZ,UAAU,UAAU;IACpB,cAAc,UAAU;IACxB;IACA,OAAO;IACR,CAAC;;AAGJ,kBAAgB,QAAQ,aAAa,YAAY,SAAS,UAAU,SAAS;WAEvE;AAEN,MAAI,WAAW,UAAU,CACvB,QAAO,WAAW;GAAE,WAAW;GAAM,OAAO;GAAM,CAAC;;CAIvD,MAAM,SAAS,SAAS,YAAY,OAAO;AAC3C,KAAI,WAAW,YAAY,OAAO,WAAW,UAAU,CACrD,uBAAsB,WAAW;UAC1B,KAAK,IACZ,GAAE,IAAI,KAAK,4GAA0G;AAEvH,QAAO;;AAKT,eAAe,iBAAiB,OAAuB,KAAsD;AAE3G,KADqB,YAAY,CAChB,WAAY,OAAO,CAAC,MACnC,QAAO,KAAA;AACT,QAAO,gBAAgB,MAAM;;AAG/B,eAAe,cAAc,MAMX;CAChB,MAAM,MAAM,QAAQ,KAAK;CAGzB,MAAM,eAAe,uBAAuB,IAAI;AAEhD,KAAI,gBAAgB,aAAa,SAAS,GAAG;AAC3C,IAAE,MAAM,0DAA0D,aAAa,OAAO,mBAAmB;AAEzG,MAAI,KAAK,KAAK;AACZ,KAAE,IAAI,MAAM,wFAAwF;AACpG;;EAGF,MAAM,WAAW,MAAM,MAAM,EAAE,YAAY;GACzC,SAAS;GACT,SAAS,aAAa,KAAI,SAAQ;IAChC,OAAO,IAAI;IACX,OAAO;IACP,MAAM,IAAI;IACX,EAAA;GACF,CAAC,CAAC;AAEH,MAAI,SAAS,WAAW,EACtB;EAGF,MAAM,YAAY,MAAM,iBAAiB,KAAK,OAAO,KAAK,IAAI;EAG9D,MAAM,cAAc,KAAK,KAAK,eAAe;EAC7C,MAAM,UAAU,KAAK,MAAM,aAAa,aAAa,QAAQ,CAAC;EAC9D,MAAM,cAAc,OAAO,QAAQ,eAAe,WAC9C,QAAQ,aACR,QAAQ,YAAY,KAAK,QAAQ,UAAU,GAAG,CAAC,QAAQ,UAAU,GAAG;EAExE,MAAM,UAAmD,EAAE;AAE3D,OAAK,MAAM,OAAO,UAAU;AAC1B,KAAE,IAAI,KAAK,WAAW,IAAI,KAAK,UAAU,IAAI,UAAU;GACvD,MAAM,SAAS,MAAM,oBAAoB;IACvC,YAAY,IAAI;IAChB,aAAa,IAAI;IACjB,SAAS,IAAI;IACb,aAAa,IAAI;IACjB,SAAS,IAAI,WAAW;IACxB,cAAc;IACd;IACA,OAAO,KAAK;IACZ,OAAO,KAAK;IACb,CAAC;AACF,OAAI,OACF,SAAQ,KAAK;IAAE,MAAM,IAAI;IAAM;IAAQ,CAAC;;AAG5C,MAAI,QAAQ,SAAS,GAAG;AACtB,KAAE,IAAI,QAAQ,GAAG;AACjB,QAAK,MAAM,EAAE,MAAM,YAAY,QAC7B,GAAE,IAAI,QAAQ,GAAG,KAAK,KAAK,SAAS,KAAK,OAAO,GAAG;AAErD,yBAAsB,QAAQ,KAAI,MAAK,EAAE,KAAK,CAAC;;AAGjD,IAAE,MAAM,OAAO;AACf;;CAIF,MAAM,UAAU,qBAAqB,IAAI;AACzC,KAAI,CAAC,SAAS;AACZ,IAAE,IAAI,MAAM,6CAA6C;AACzD;;CAGF,MAAM,EAAE,MAAM,aAAa,SAAS,YAAY;AAEhD,GAAE,MAAM,+CAA+C,YAAY,UAAU,UAAU;CAEvF,MAAM,YAAY,MAAM,iBAAiB,KAAK,OAAO,KAAK,IAAI;CAE9D,MAAM,SAAS,MAAM,oBAAoB;EACvC,YAAY;EACZ;EACA;EACA,aAAa,QAAQ;EACrB;EACA,KAAK,KAAK;EACV;EACA,OAAO,KAAK;EACZ,OAAO,KAAK;EACb,CAAC;AAEF,KAAI,QAAQ;AACV,wBAAsB,CAAC,YAAY,CAAC;AACpC,IAAE,MAAM,qBAAqB,SAAS,KAAK,OAAO,GAAG;;;AAIzD,SAAS,sBAAsB,cAA8B;CAC3D,MAAM,QAAQ,aAAa,KAAK,KAAK;AACrC,GAAE,IAAI,KACJ,yBAAyB,aAAa,SAAS,IAAI,iBAAiB,aAAa,+CACvD,MAAM,uJAEjC;;AAGH,MAAa,mBAAmB,cAAc;CAC5C,MAAM;EAAE,MAAM;EAAU,aAAa;EAA8C;CACnF,MAAM;EACJ,KAAK;GACH,MAAM;GACN,OAAO;GACP,aAAa;GACd;EACD,OAAO;GACL,MAAM;GACN,OAAO;GACP,aAAa;GACb,WAAW;GACZ;EACD,KAAK;GACH,MAAM;GACN,OAAO;GACP,aAAa;GACb,SAAS;GACV;EACD,OAAO;GACL,MAAM;GACN,OAAO;GACP,aAAa;GACb,SAAS;GACV;EACD,OAAO;GACL,MAAM;GACN,aAAa;GACb,SAAS;;EAEZ;CACD,MAAM,IAAI,EAAE,QAAQ;AAClB,QAAM,cAAc;GAClB,KAAK,KAAK;GACV,OAAO,KAAK;GACZ,KAAK,KAAK;GACV,OAAO,KAAK;GACZ,OAAO,KAAK;GACb,CAAC;;CAEL,CAAC"}
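The `printConsumerGuidance` strings in the embedded `author.ts` source above describe how downstream projects pick up an authored skill: install the authored package as a dependency, then run `skilld prepare` (typically via a package.json `"prepare"` script). A minimal sketch of that consumer-side setup, assuming a hypothetical authored package named `my-lib`; only the `"prepare": "skilld prepare"` script text comes from the source, everything else is an illustrative placeholder:

```ts
// Hypothetical consumer package.json, written as a TS object so it can carry comments.
// Only the "prepare": "skilld prepare" script is taken from printConsumerGuidance above;
// the package and dependency names are placeholders, not part of the published files.
const consumerPackageJson = {
  name: 'consumer-app', // hypothetical consumer project
  private: true,
  scripts: {
    // npm runs the root project's `prepare` script after a plain `npm install`,
    // so the skills shipped by dependencies are picked up automatically.
    prepare: 'skilld prepare',
  },
  dependencies: {
    // a package authored with `skilld author`, which ships a skills/ directory
    'my-lib': '^1.0.0',
  },
}

export default consumerPackageJson
```

On the authoring side, the `patchPackageJsonFiles` call in `authorSinglePackage` (triggered only when the output directory resolves under `skills/`) is what adds that directory to the authored package's `"files"` array, so the generated skill actually reaches the npm tarball; for a custom `--out` location the source instead logs a reminder to add the path to `"files"` manually.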