skilld 1.5.5 → 1.7.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (120)
  1. package/README.md +32 -23
  2. package/dist/_chunks/agent.mjs +2 -78
  3. package/dist/_chunks/agent.mjs.map +1 -1
  4. package/dist/_chunks/assemble.mjs +1 -18
  5. package/dist/_chunks/assemble.mjs.map +1 -1
  6. package/dist/_chunks/author-group.mjs +17 -0
  7. package/dist/_chunks/author-group.mjs.map +1 -0
  8. package/dist/_chunks/author.mjs +8 -24
  9. package/dist/_chunks/author.mjs.map +1 -1
  10. package/dist/_chunks/cache.mjs +1 -73
  11. package/dist/_chunks/cache.mjs.map +1 -1
  12. package/dist/_chunks/cache2.mjs +84 -17
  13. package/dist/_chunks/cache2.mjs.map +1 -1
  14. package/dist/_chunks/cli-helpers.mjs +3 -166
  15. package/dist/_chunks/cli-helpers.mjs.map +1 -1
  16. package/dist/_chunks/cli-helpers2.mjs +0 -11
  17. package/dist/_chunks/config.mjs +119 -54
  18. package/dist/_chunks/config.mjs.map +1 -1
  19. package/dist/_chunks/core.mjs +9 -0
  20. package/dist/_chunks/detect.mjs +29 -226
  21. package/dist/_chunks/detect.mjs.map +1 -1
  22. package/dist/_chunks/embedding-cache.mjs +0 -5
  23. package/dist/_chunks/embedding-cache2.mjs +2 -3
  24. package/dist/_chunks/formatting.mjs +0 -6
  25. package/dist/_chunks/formatting.mjs.map +1 -1
  26. package/dist/_chunks/index.d.mts +0 -10
  27. package/dist/_chunks/index.d.mts.map +1 -1
  28. package/dist/_chunks/index2.d.mts +3 -6
  29. package/dist/_chunks/index2.d.mts.map +1 -1
  30. package/dist/_chunks/index3.d.mts +81 -109
  31. package/dist/_chunks/index3.d.mts.map +1 -1
  32. package/dist/_chunks/install.mjs +85 -550
  33. package/dist/_chunks/install.mjs.map +1 -1
  34. package/dist/_chunks/install2.mjs +554 -0
  35. package/dist/_chunks/install2.mjs.map +1 -0
  36. package/dist/_chunks/libs/@sinclair/typebox.mjs +0 -444
  37. package/dist/_chunks/libs/@sinclair/typebox.mjs.map +1 -1
  38. package/dist/_chunks/list.mjs +0 -16
  39. package/dist/_chunks/list.mjs.map +1 -1
  40. package/dist/_chunks/lockfile.mjs +2 -10
  41. package/dist/_chunks/lockfile.mjs.map +1 -1
  42. package/dist/_chunks/markdown.mjs +0 -9
  43. package/dist/_chunks/markdown.mjs.map +1 -1
  44. package/dist/_chunks/package-json.mjs +0 -25
  45. package/dist/_chunks/package-json.mjs.map +1 -1
  46. package/dist/_chunks/package-registry.mjs +465 -0
  47. package/dist/_chunks/package-registry.mjs.map +1 -0
  48. package/dist/_chunks/pool2.mjs +0 -2
  49. package/dist/_chunks/pool2.mjs.map +1 -1
  50. package/dist/_chunks/prefix.mjs +108 -0
  51. package/dist/_chunks/prefix.mjs.map +1 -0
  52. package/dist/_chunks/prepare.mjs +14 -9
  53. package/dist/_chunks/prepare.mjs.map +1 -1
  54. package/dist/_chunks/prepare2.mjs +1 -19
  55. package/dist/_chunks/prepare2.mjs.map +1 -1
  56. package/dist/_chunks/prompts.mjs +6 -201
  57. package/dist/_chunks/prompts.mjs.map +1 -1
  58. package/dist/_chunks/retriv.mjs +23 -24
  59. package/dist/_chunks/retriv.mjs.map +1 -1
  60. package/dist/_chunks/rolldown-runtime.mjs +0 -2
  61. package/dist/_chunks/sanitize.mjs +0 -78
  62. package/dist/_chunks/sanitize.mjs.map +1 -1
  63. package/dist/_chunks/search-helpers.mjs +99 -0
  64. package/dist/_chunks/search-helpers.mjs.map +1 -0
  65. package/dist/_chunks/search-interactive.mjs +1 -18
  66. package/dist/_chunks/search-interactive.mjs.map +1 -1
  67. package/dist/_chunks/search.mjs +218 -19
  68. package/dist/_chunks/search.mjs.map +1 -0
  69. package/dist/_chunks/setup.mjs +0 -13
  70. package/dist/_chunks/setup.mjs.map +1 -1
  71. package/dist/_chunks/shared.mjs +1 -473
  72. package/dist/_chunks/shared.mjs.map +1 -1
  73. package/dist/_chunks/skills.mjs +3 -3
  74. package/dist/_chunks/skills.mjs.map +1 -1
  75. package/dist/_chunks/sources.mjs +1179 -1440
  76. package/dist/_chunks/sources.mjs.map +1 -1
  77. package/dist/_chunks/sync-registry.mjs +59 -0
  78. package/dist/_chunks/sync-registry.mjs.map +1 -0
  79. package/dist/_chunks/sync-shared.mjs +0 -16
  80. package/dist/_chunks/sync-shared2.mjs +10 -49
  81. package/dist/_chunks/sync-shared2.mjs.map +1 -1
  82. package/dist/_chunks/sync.mjs +209 -120
  83. package/dist/_chunks/sync.mjs.map +1 -1
  84. package/dist/_chunks/sync2.mjs +1 -21
  85. package/dist/_chunks/types.d.mts +0 -2
  86. package/dist/_chunks/types.d.mts.map +1 -1
  87. package/dist/_chunks/uninstall.mjs +3 -27
  88. package/dist/_chunks/uninstall.mjs.map +1 -1
  89. package/dist/_chunks/upload.mjs +152 -0
  90. package/dist/_chunks/upload.mjs.map +1 -0
  91. package/dist/_chunks/validate.mjs +1 -8
  92. package/dist/_chunks/validate.mjs.map +1 -1
  93. package/dist/_chunks/version.mjs +30 -0
  94. package/dist/_chunks/version.mjs.map +1 -0
  95. package/dist/_chunks/wizard.mjs +2 -3
  96. package/dist/_chunks/yaml.mjs +0 -21
  97. package/dist/_chunks/yaml.mjs.map +1 -1
  98. package/dist/agent/index.d.mts +0 -24
  99. package/dist/agent/index.d.mts.map +1 -1
  100. package/dist/agent/index.mjs +2 -9
  101. package/dist/cache/index.mjs +1 -3
  102. package/dist/cli-entry.mjs +0 -6
  103. package/dist/cli-entry.mjs.map +1 -1
  104. package/dist/cli.mjs +48 -33
  105. package/dist/cli.mjs.map +1 -1
  106. package/dist/index.d.mts +1 -1
  107. package/dist/index.mjs +2 -8
  108. package/dist/prepare.mjs +0 -12
  109. package/dist/prepare.mjs.map +1 -1
  110. package/dist/retriv/index.mjs +0 -2
  111. package/dist/retriv/worker.d.mts +0 -3
  112. package/dist/retriv/worker.d.mts.map +1 -1
  113. package/dist/retriv/worker.mjs +0 -2
  114. package/dist/retriv/worker.mjs.map +1 -1
  115. package/dist/sources/index.d.mts +2 -2
  116. package/dist/sources/index.mjs +3 -7
  117. package/dist/types.d.mts +1 -1
  118. package/package.json +20 -21
  119. package/dist/_chunks/search2.mjs +0 -319
  120. package/dist/_chunks/search2.mjs.map +0 -1
package/dist/_chunks/list.mjs

@@ -1,22 +1,7 @@
- import "./config.mjs";
- import "./package-json.mjs";
- import "./prepare.mjs";
- import "./sanitize.mjs";
- import "./cache.mjs";
- import "./yaml.mjs";
- import "./markdown.mjs";
- import "./shared.mjs";
- import "./sources.mjs";
- import "./detect.mjs";
- import "./prompts.mjs";
- import "./agent.mjs";
- import "./libs/@sinclair/typebox.mjs";
  import { x as sharedArgs } from "./cli-helpers.mjs";
- import "./lockfile.mjs";
  import { i as iterateSkills, t as getProjectState } from "./skills.mjs";
  import { c as timeAgo, i as formatSource } from "./formatting.mjs";
  import { defineCommand } from "citty";
- //#region src/commands/list.ts
  async function listCommand(opts = {}) {
  if (opts.outdated) {
  const entries = (await getProjectState()).outdated.map((skill) => ({
@@ -111,7 +96,6 @@ const listCommandDef = defineCommand({
  });
  }
  });
- //#endregion
  export { listCommandDef };

  //# sourceMappingURL=list.mjs.map
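
The removed lines above are side-effect imports that previously pulled most of the CLI's chunk graph into the `list` chunk, plus rolldown's `//#region` markers; `listCommand` itself is unchanged. Its `--json` output shape is visible in the `ListEntry` interface embedded in the sourcemap below; a sketch of one entry (the interface is copied verbatim from the map's sourcesContent, the sample values are invented):

  // ListEntry, as embedded in list.mjs.map below.
  interface ListEntry {
    name: string
    version: string
    source: string
    synced: string
    latest?: string // only populated on the --outdated path
  }

  // Hypothetical `skilld list --json` output: a single JSON array of entries.
  const sample: ListEntry[] = [
    { name: 'demo-skill', version: '1.2.0', source: 'registry', synced: '2 days ago' },
  ]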
package/dist/_chunks/list.mjs.map

@@ -1 +1 @@
- {"version":3,"file":"list.mjs","names":[],"sources":["../../src/commands/list.ts"],"sourcesContent":["import { defineCommand } from 'citty'\nimport { sharedArgs } from '../cli-helpers.ts'\nimport { formatSource, timeAgo } from '../core/formatting.ts'\nimport { getProjectState, iterateSkills } from '../core/skills.ts'\n\nexport interface ListOptions {\n global?: boolean\n json?: boolean\n outdated?: boolean\n}\n\ninterface ListEntry {\n name: string\n version: string\n source: string\n synced: string\n latest?: string\n}\n\nexport async function listCommand(opts: ListOptions = {}): Promise<void> {\n if (opts.outdated) {\n const state = await getProjectState()\n const entries: ListEntry[] = state.outdated.map(skill => ({\n name: skill.name,\n version: skill.info?.version || '',\n latest: skill.latestVersion || '',\n source: formatSource(skill.info?.source),\n synced: timeAgo(skill.info?.syncedAt),\n }))\n\n if (opts.json) {\n process.stdout.write(`${JSON.stringify(entries)}\\n`)\n return\n }\n\n if (entries.length === 0) {\n process.stdout.write('All skills are up to date\\n')\n return\n }\n\n const nameW = Math.max(...entries.map(e => e.name.length))\n const verW = Math.max(...entries.map(e => e.version.length))\n const latW = Math.max(...entries.map(e => (e.latest || '').length))\n const srcW = Math.max(...entries.map(e => e.source.length))\n\n for (const e of entries) {\n const line = [\n e.name.padEnd(nameW),\n `${e.version.padEnd(verW)} → ${(e.latest || '').padEnd(latW)}`,\n e.source.padEnd(srcW),\n e.synced,\n ].join(' ')\n process.stdout.write(`${line}\\n`)\n }\n return\n }\n\n const scope = opts.global ? 'global' : 'all'\n const skills = [...iterateSkills({ scope })]\n\n // Deduplicate by package identity\n const seen = new Set<string>()\n const entries: ListEntry[] = []\n\n for (const skill of skills) {\n const key = skill.info?.packageName || skill.name\n if (seen.has(key))\n continue\n seen.add(key)\n entries.push({\n name: skill.name,\n version: skill.info?.version || '',\n source: formatSource(skill.info?.source),\n synced: timeAgo(skill.info?.syncedAt),\n })\n }\n\n if (opts.json) {\n process.stdout.write(`${JSON.stringify(entries)}\\n`)\n return\n }\n\n if (entries.length === 0) {\n process.stdout.write('No skills installed\\n')\n return\n }\n\n // Column widths\n const nameW = Math.max(...entries.map(e => e.name.length))\n const verW = Math.max(...entries.map(e => e.version.length))\n const srcW = Math.max(...entries.map(e => e.source.length))\n\n for (const e of entries) {\n const line = [\n e.name.padEnd(nameW),\n e.version.padEnd(verW),\n e.source.padEnd(srcW),\n e.synced,\n ].join(' ')\n process.stdout.write(`${line}\\n`)\n }\n}\n\nexport const listCommandDef = defineCommand({\n meta: { name: 'list', description: 'List installed skills' },\n args: {\n global: sharedArgs.global,\n json: {\n type: 'boolean' as const,\n description: 'Output as JSON',\n default: false,\n },\n outdated: {\n type: 'boolean' as const,\n alias: 'o',\n description: 'Show only outdated skills',\n default: false,\n },\n },\n run({ args }) {\n return listCommand({ global: args.global, json: args.json, outdated: args.outdated })\n 
},\n})\n"],"mappings":";;;;;;;;;;;;;;;;;;;AAmBA,eAAsB,YAAY,OAAoB,EAAE,EAAiB;AACvE,KAAI,KAAK,UAAU;EAEjB,MAAM,WADQ,MAAM,iBAAiB,EACF,SAAS,KAAI,WAAU;GACxD,MAAM,MAAM;GACZ,SAAS,MAAM,MAAM,WAAW;GAChC,QAAQ,MAAM,iBAAiB;GAC/B,QAAQ,aAAa,MAAM,MAAM,OAAO;GACxC,QAAQ,QAAQ,MAAM,MAAM,SAAA;GAC7B,EAAE;AAEH,MAAI,KAAK,MAAM;AACb,WAAQ,OAAO,MAAM,GAAG,KAAK,UAAU,QAAQ,CAAC,IAAI;AACpD;;AAGF,MAAI,QAAQ,WAAW,GAAG;AACxB,WAAQ,OAAO,MAAM,8BAA8B;AACnD;;EAGF,MAAM,QAAQ,KAAK,IAAI,GAAG,QAAQ,KAAI,MAAK,EAAE,KAAK,OAAO,CAAC;EAC1D,MAAM,OAAO,KAAK,IAAI,GAAG,QAAQ,KAAI,MAAK,EAAE,QAAQ,OAAO,CAAC;EAC5D,MAAM,OAAO,KAAK,IAAI,GAAG,QAAQ,KAAI,OAAM,EAAE,UAAU,IAAI,OAAO,CAAC;EACnE,MAAM,OAAO,KAAK,IAAI,GAAG,QAAQ,KAAI,MAAK,EAAE,OAAO,OAAO,CAAC;AAE3D,OAAK,MAAM,KAAK,SAAS;GACvB,MAAM,OAAO;IACX,EAAE,KAAK,OAAO,MAAM;IACpB,GAAG,EAAE,QAAQ,OAAO,KAAK,CAAC,QAAQ,EAAE,UAAU,IAAI,OAAO,KAAK;IAC9D,EAAE,OAAO,OAAO,KAAK;IACrB,EAAE;IACH,CAAC,KAAK,KAAK;AACZ,WAAQ,OAAO,MAAM,GAAG,KAAK,IAAI;;AAEnC;;CAIF,MAAM,SAAS,CAAC,GAAG,cAAc,EAAE,OADrB,KAAK,SAAS,WAAW,OACG,CAAC,CAAC;CAG5C,MAAM,uBAAO,IAAI,KAAa;CAC9B,MAAM,UAAuB,EAAE;AAE/B,MAAK,MAAM,SAAS,QAAQ;EAC1B,MAAM,MAAM,MAAM,MAAM,eAAe,MAAM;AAC7C,MAAI,KAAK,IAAI,IAAI,CACf;AACF,OAAK,IAAI,IAAI;AACb,UAAQ,KAAK;GACX,MAAM,MAAM;GACZ,SAAS,MAAM,MAAM,WAAW;GAChC,QAAQ,aAAa,MAAM,MAAM,OAAO;GACxC,QAAQ,QAAQ,MAAM,MAAM,SAAA;GAC7B,CAAC;;AAGJ,KAAI,KAAK,MAAM;AACb,UAAQ,OAAO,MAAM,GAAG,KAAK,UAAU,QAAQ,CAAC,IAAI;AACpD;;AAGF,KAAI,QAAQ,WAAW,GAAG;AACxB,UAAQ,OAAO,MAAM,wBAAwB;AAC7C;;CAIF,MAAM,QAAQ,KAAK,IAAI,GAAG,QAAQ,KAAI,MAAK,EAAE,KAAK,OAAO,CAAC;CAC1D,MAAM,OAAO,KAAK,IAAI,GAAG,QAAQ,KAAI,MAAK,EAAE,QAAQ,OAAO,CAAC;CAC5D,MAAM,OAAO,KAAK,IAAI,GAAG,QAAQ,KAAI,MAAK,EAAE,OAAO,OAAO,CAAC;AAE3D,MAAK,MAAM,KAAK,SAAS;EACvB,MAAM,OAAO;GACX,EAAE,KAAK,OAAO,MAAM;GACpB,EAAE,QAAQ,OAAO,KAAK;GACtB,EAAE,OAAO,OAAO,KAAK;GACrB,EAAE;GACH,CAAC,KAAK,KAAK;AACZ,UAAQ,OAAO,MAAM,GAAG,KAAK,IAAI;;;AAIrC,MAAa,iBAAiB,cAAc;CAC1C,MAAM;EAAE,MAAM;EAAQ,aAAa;EAAyB;CAC5D,MAAM;EACJ,QAAQ,WAAW;EACnB,MAAM;GACJ,MAAM;GACN,aAAa;GACb,SAAS;GACV;EACD,UAAU;GACR,MAAM;GACN,OAAO;GACP,aAAa;GACb,SAAS;;EAEZ;CACD,IAAI,EAAE,QAAQ;AACZ,SAAO,YAAY;GAAE,QAAQ,KAAK;GAAQ,MAAM,KAAK;GAAM,UAAU,KAAK;GAAU,CAAC;;CAExF,CAAC"}
+ {"version":3,"file":"list.mjs","names":[],"sources":["../../src/commands/list.ts"],"sourcesContent":["import { defineCommand } from 'citty'\nimport { sharedArgs } from '../cli-helpers.ts'\nimport { formatSource, timeAgo } from '../core/formatting.ts'\nimport { getProjectState, iterateSkills } from '../core/skills.ts'\n\nexport interface ListOptions {\n global?: boolean\n json?: boolean\n outdated?: boolean\n}\n\ninterface ListEntry {\n name: string\n version: string\n source: string\n synced: string\n latest?: string\n}\n\nexport async function listCommand(opts: ListOptions = {}): Promise<void> {\n if (opts.outdated) {\n const state = await getProjectState()\n const entries: ListEntry[] = state.outdated.map(skill => ({\n name: skill.name,\n version: skill.info?.version || '',\n latest: skill.latestVersion || '',\n source: formatSource(skill.info?.source),\n synced: timeAgo(skill.info?.syncedAt),\n }))\n\n if (opts.json) {\n process.stdout.write(`${JSON.stringify(entries)}\\n`)\n return\n }\n\n if (entries.length === 0) {\n process.stdout.write('All skills are up to date\\n')\n return\n }\n\n const nameW = Math.max(...entries.map(e => e.name.length))\n const verW = Math.max(...entries.map(e => e.version.length))\n const latW = Math.max(...entries.map(e => (e.latest || '').length))\n const srcW = Math.max(...entries.map(e => e.source.length))\n\n for (const e of entries) {\n const line = [\n e.name.padEnd(nameW),\n `${e.version.padEnd(verW)} → ${(e.latest || '').padEnd(latW)}`,\n e.source.padEnd(srcW),\n e.synced,\n ].join(' ')\n process.stdout.write(`${line}\\n`)\n }\n return\n }\n\n const scope = opts.global ? 'global' : 'all'\n const skills = [...iterateSkills({ scope })]\n\n // Deduplicate by package identity\n const seen = new Set<string>()\n const entries: ListEntry[] = []\n\n for (const skill of skills) {\n const key = skill.info?.packageName || skill.name\n if (seen.has(key))\n continue\n seen.add(key)\n entries.push({\n name: skill.name,\n version: skill.info?.version || '',\n source: formatSource(skill.info?.source),\n synced: timeAgo(skill.info?.syncedAt),\n })\n }\n\n if (opts.json) {\n process.stdout.write(`${JSON.stringify(entries)}\\n`)\n return\n }\n\n if (entries.length === 0) {\n process.stdout.write('No skills installed\\n')\n return\n }\n\n // Column widths\n const nameW = Math.max(...entries.map(e => e.name.length))\n const verW = Math.max(...entries.map(e => e.version.length))\n const srcW = Math.max(...entries.map(e => e.source.length))\n\n for (const e of entries) {\n const line = [\n e.name.padEnd(nameW),\n e.version.padEnd(verW),\n e.source.padEnd(srcW),\n e.synced,\n ].join(' ')\n process.stdout.write(`${line}\\n`)\n }\n}\n\nexport const listCommandDef = defineCommand({\n meta: { name: 'list', description: 'List installed skills' },\n args: {\n global: sharedArgs.global,\n json: {\n type: 'boolean' as const,\n description: 'Output as JSON',\n default: false,\n },\n outdated: {\n type: 'boolean' as const,\n alias: 'o',\n description: 'Show only outdated skills',\n default: false,\n },\n },\n run({ args }) {\n return listCommand({ global: args.global, json: args.json, outdated: args.outdated })\n 
},\n})\n"],"mappings":";;;;AAmBA,eAAsB,YAAY,OAAoB,EAAE,EAAiB;AACvE,KAAI,KAAK,UAAU;EAEjB,MAAM,WADQ,MAAM,iBAAiB,EACF,SAAS,KAAI,WAAU;GACxD,MAAM,MAAM;GACZ,SAAS,MAAM,MAAM,WAAW;GAChC,QAAQ,MAAM,iBAAiB;GAC/B,QAAQ,aAAa,MAAM,MAAM,OAAO;GACxC,QAAQ,QAAQ,MAAM,MAAM,SAAA;GAC7B,EAAE;AAEH,MAAI,KAAK,MAAM;AACb,WAAQ,OAAO,MAAM,GAAG,KAAK,UAAU,QAAQ,CAAC,IAAI;AACpD;;AAGF,MAAI,QAAQ,WAAW,GAAG;AACxB,WAAQ,OAAO,MAAM,8BAA8B;AACnD;;EAGF,MAAM,QAAQ,KAAK,IAAI,GAAG,QAAQ,KAAI,MAAK,EAAE,KAAK,OAAO,CAAC;EAC1D,MAAM,OAAO,KAAK,IAAI,GAAG,QAAQ,KAAI,MAAK,EAAE,QAAQ,OAAO,CAAC;EAC5D,MAAM,OAAO,KAAK,IAAI,GAAG,QAAQ,KAAI,OAAM,EAAE,UAAU,IAAI,OAAO,CAAC;EACnE,MAAM,OAAO,KAAK,IAAI,GAAG,QAAQ,KAAI,MAAK,EAAE,OAAO,OAAO,CAAC;AAE3D,OAAK,MAAM,KAAK,SAAS;GACvB,MAAM,OAAO;IACX,EAAE,KAAK,OAAO,MAAM;IACpB,GAAG,EAAE,QAAQ,OAAO,KAAK,CAAC,QAAQ,EAAE,UAAU,IAAI,OAAO,KAAK;IAC9D,EAAE,OAAO,OAAO,KAAK;IACrB,EAAE;IACH,CAAC,KAAK,KAAK;AACZ,WAAQ,OAAO,MAAM,GAAG,KAAK,IAAI;;AAEnC;;CAIF,MAAM,SAAS,CAAC,GAAG,cAAc,EAAE,OADrB,KAAK,SAAS,WAAW,OACG,CAAC,CAAC;CAG5C,MAAM,uBAAO,IAAI,KAAa;CAC9B,MAAM,UAAuB,EAAE;AAE/B,MAAK,MAAM,SAAS,QAAQ;EAC1B,MAAM,MAAM,MAAM,MAAM,eAAe,MAAM;AAC7C,MAAI,KAAK,IAAI,IAAI,CACf;AACF,OAAK,IAAI,IAAI;AACb,UAAQ,KAAK;GACX,MAAM,MAAM;GACZ,SAAS,MAAM,MAAM,WAAW;GAChC,QAAQ,aAAa,MAAM,MAAM,OAAO;GACxC,QAAQ,QAAQ,MAAM,MAAM,SAAA;GAC7B,CAAC;;AAGJ,KAAI,KAAK,MAAM;AACb,UAAQ,OAAO,MAAM,GAAG,KAAK,UAAU,QAAQ,CAAC,IAAI;AACpD;;AAGF,KAAI,QAAQ,WAAW,GAAG;AACxB,UAAQ,OAAO,MAAM,wBAAwB;AAC7C;;CAIF,MAAM,QAAQ,KAAK,IAAI,GAAG,QAAQ,KAAI,MAAK,EAAE,KAAK,OAAO,CAAC;CAC1D,MAAM,OAAO,KAAK,IAAI,GAAG,QAAQ,KAAI,MAAK,EAAE,QAAQ,OAAO,CAAC;CAC5D,MAAM,OAAO,KAAK,IAAI,GAAG,QAAQ,KAAI,MAAK,EAAE,OAAO,OAAO,CAAC;AAE3D,MAAK,MAAM,KAAK,SAAS;EACvB,MAAM,OAAO;GACX,EAAE,KAAK,OAAO,MAAM;GACpB,EAAE,QAAQ,OAAO,KAAK;GACtB,EAAE,OAAO,OAAO,KAAK;GACrB,EAAE;GACH,CAAC,KAAK,KAAK;AACZ,UAAQ,OAAO,MAAM,GAAG,KAAK,IAAI;;;AAIrC,MAAa,iBAAiB,cAAc;CAC1C,MAAM;EAAE,MAAM;EAAQ,aAAa;EAAyB;CAC5D,MAAM;EACJ,QAAQ,WAAW;EACnB,MAAM;GACJ,MAAM;GACN,aAAa;GACb,SAAS;GACV;EACD,UAAU;GACR,MAAM;GACN,OAAO;GACP,aAAa;GACb,SAAS;;EAEZ;CACD,IAAI,EAAE,QAAQ;AACZ,SAAO,YAAY;GAAE,QAAQ,KAAK;GAAQ,MAAM,KAAK;GAAM,UAAU,KAAK;GAAU,CAAC;;CAExF,CAAC"}
package/dist/_chunks/lockfile.mjs

@@ -2,7 +2,6 @@ import { n as yamlParseKV, t as yamlEscape } from "./yaml.mjs";
  import { i as parseFrontmatter } from "./markdown.mjs";
  import { join } from "pathe";
  import { existsSync, readFileSync, unlinkSync, writeFileSync } from "node:fs";
- //#region src/core/lockfile.ts
  function parsePackages(packages) {
  if (!packages) return [];
  return packages.split(",").map((s) => {
@@ -51,7 +50,7 @@ function invalidateLockCache(skillsDir) {
  }
  function readLock(skillsDir) {
  const cached = lockCache.get(skillsDir);
- if (cached) return cached;
+ if (cached) return { skills: { ...cached.skills } };
  const lockPath = join(skillsDir, "skilld-lock.yaml");
  if (!existsSync(lockPath)) return null;
  const content = readFileSync(lockPath, "utf-8");
@@ -69,6 +68,7 @@ function readLock(skillsDir) {
  if (kv && isSkillInfoKey(kv[0])) skills[currentSkill][kv[0]] = kv[1];
  }
  }
+ for (const info of Object.values(skills)) if (info.source === "npm") info.source = "registry";
  const lock = { skills };
  lockCache.set(skillsDir, lock);
  return { skills: { ...lock.skills } };
@@ -109,9 +109,6 @@ function writeLock(skillsDir, skillName, info) {
  writeFileSync(lockPath, serializeLock(lock));
  invalidateLockCache(skillsDir);
  }
- /**
- * Merge multiple lockfiles, preferring the most recently synced entry per skill.
- */
  function mergeLocks(locks) {
  const merged = {};
  for (const lock of locks) for (const [name, info] of Object.entries(lock.skills)) {
@@ -120,10 +117,6 @@ function mergeLocks(locks) {
  }
  return { skills: merged };
  }
- /**
- * Sync a lockfile to all other dirs that already have a skilld-lock.yaml.
- * Only updates existing lockfiles — does not create new ones.
- */
  function syncLockfilesToDirs(sourceLock, dirs) {
  for (const dir of dirs) {
  const lockPath = join(dir, "skilld-lock.yaml");
@@ -147,7 +140,6 @@ function removeLockEntry(skillsDir, skillName) {
  writeFileSync(lockPath, serializeLock(lock));
  invalidateLockCache(skillsDir);
  }
- //#endregion
  export { removeLockEntry as a, readLock as i, parsePackages as n, syncLockfilesToDirs as o, parseSkillFrontmatter as r, writeLock as s, mergeLocks as t };

  //# sourceMappingURL=lockfile.mjs.map
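
Two behavioral changes in this hunk, both in `readLock`: a cache hit now returns `{ skills: { ...cached.skills } }` instead of the cached object itself (a cache miss already returned a copy, so hits were the odd case out), and legacy `source: "npm"` entries are rewritten to `"registry"` on read. The copy matters because callers mutate the returned lock; `removeLockEntry` in this same file does `delete lock.skills[skillName]` on a `readLock` result. A sketch of the aliasing bug this closes (caller code hypothetical; note the copy is shallow, so individual SkillInfo objects are still shared):

  // Hypothetical caller, sketching why readLock now copies on cache hits.
  const lock = readLock(skillsDir)
  if (lock)
    delete lock.skills['some-skill'] // pre-1.7.0, a cache hit aliased the cache,
                                     // so this silently corrupted later reads
  const again = readLock(skillsDir)  // 1.7.0: fresh shallow copy, entry intact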
package/dist/_chunks/lockfile.mjs.map

@@ -1 +1 @@
- {"version":3,"file":"lockfile.mjs","names":[],"sources":["../../src/core/lockfile.ts"],"sourcesContent":["import { existsSync, readFileSync, unlinkSync, writeFileSync } from 'node:fs'\nimport { join } from 'pathe'\nimport { parseFrontmatter } from './markdown.ts'\nimport { yamlEscape, yamlParseKV } from './yaml.ts'\n\nexport interface SkillInfo {\n packageName?: string\n version?: string\n /** All tracked packages as comma-separated \"name@version\" pairs (multi-package skills) */\n packages?: string\n repo?: string\n source?: string\n syncedAt?: string\n generator?: string\n /** Skill path within repo (git-sourced skills) */\n path?: string\n /** Git ref tracked for updates */\n ref?: string\n /** Git commit SHA at install time */\n commit?: string\n}\n\nexport function parsePackages(packages?: string): Array<{ name: string, version: string }> {\n if (!packages)\n return []\n return packages.split(',').map((s) => {\n const trimmed = s.trim()\n const atIdx = trimmed.lastIndexOf('@')\n if (atIdx <= 0)\n return { name: trimmed, version: '' }\n return { name: trimmed.slice(0, atIdx), version: trimmed.slice(atIdx + 1) }\n }).filter(p => p.name)\n}\n\nexport function serializePackages(pkgs: Array<{ name: string, version: string }>): string {\n return pkgs.map(p => `${p.name}@${p.version}`).join(', ')\n}\n\nexport interface SkilldLock {\n skills: Record<string, SkillInfo>\n}\n\nconst SKILL_FM_KEYS: (keyof SkillInfo)[] = ['packageName', 'version', 'packages', 'repo', 'source', 'syncedAt', 'generator', 'path', 'ref', 'commit']\n\nfunction isSkillInfoKey(key: string): key is keyof SkillInfo {\n return (SKILL_FM_KEYS as readonly string[]).includes(key)\n}\n\nexport function parseSkillFrontmatter(skillPath: string): SkillInfo | null {\n if (!existsSync(skillPath))\n return null\n const content = readFileSync(skillPath, 'utf-8')\n const fm = parseFrontmatter(content)\n if (Object.keys(fm).length === 0)\n return null\n\n const info: SkillInfo = {}\n for (const key of SKILL_FM_KEYS) {\n if (fm[key])\n info[key] = fm[key]\n }\n return info\n}\n\nconst lockCache = new Map<string, SkilldLock>()\n\nexport function invalidateLockCache(skillsDir?: string): void {\n if (skillsDir)\n lockCache.delete(skillsDir)\n else\n lockCache.clear()\n}\n\nexport function readLock(skillsDir: string): SkilldLock | null {\n const cached = lockCache.get(skillsDir)\n if (cached)\n return cached\n const lockPath = join(skillsDir, 'skilld-lock.yaml')\n if (!existsSync(lockPath))\n return null\n const content = readFileSync(lockPath, 'utf-8')\n\n const skills: Record<string, SkillInfo> = {}\n let currentSkill: string | null = null\n\n for (const line of content.split('\\n')) {\n const skillMatch = line.match(/^ {2}(\\S+):$/)\n if (skillMatch) {\n currentSkill = skillMatch[1]!\n skills[currentSkill] = {}\n continue\n }\n if (currentSkill && line.startsWith(' ')) {\n const kv = yamlParseKV(line)\n if (kv && isSkillInfoKey(kv[0]))\n skills[currentSkill]![kv[0]] = kv[1]\n }\n }\n const lock = { skills }\n lockCache.set(skillsDir, lock)\n return { skills: { ...lock.skills } }\n}\n\nfunction serializeLock(lock: SkilldLock): string {\n let yaml = 'skills:\\n'\n for (const [name, skill] of Object.entries(lock.skills)) {\n yaml += ` ${name}:\\n`\n for (const key of SKILL_FM_KEYS) {\n if (skill[key])\n yaml += ` ${key}: ${yamlEscape(skill[key])}\\n`\n }\n }\n return yaml\n}\n\nexport function writeLock(skillsDir: string, skillName: string, info: SkillInfo): void {\n const lockPath = join(skillsDir, 'skilld-lock.yaml')\n let lock: 
SkilldLock = { skills: {} }\n if (existsSync(lockPath)) {\n lock = readLock(skillsDir) || { skills: {} }\n }\n\n const existing = lock.skills[skillName]\n if (existing && info.packageName) {\n // Merge packages list\n const existingPkgs = parsePackages(existing.packages)\n // Also include existing primary if not yet in packages list\n if (existing.packageName && !existingPkgs.some(p => p.name === existing.packageName)) {\n existingPkgs.unshift({ name: existing.packageName, version: existing.version || '' })\n }\n // Add/update new package\n const idx = existingPkgs.findIndex(p => p.name === info.packageName)\n if (idx >= 0) {\n existingPkgs[idx]!.version = info.version || ''\n }\n else {\n existingPkgs.push({ name: info.packageName, version: info.version || '' })\n }\n info.packages = serializePackages(existingPkgs)\n // Keep primary as first package\n info.packageName = existingPkgs[0]!.name\n info.version = existingPkgs[0]!.version\n // Preserve fields from existing entry that aren't in new info\n if (!info.repo && existing.repo)\n info.repo = existing.repo\n if (!info.source && existing.source)\n info.source = existing.source\n if (!info.generator && existing.generator)\n info.generator = existing.generator\n }\n\n lock.skills[skillName] = info\n writeFileSync(lockPath, serializeLock(lock))\n invalidateLockCache(skillsDir)\n}\n\n/**\n * Merge multiple lockfiles, preferring the most recently synced entry per skill.\n */\nexport function mergeLocks(locks: SkilldLock[]): SkilldLock {\n const merged: Record<string, SkillInfo> = {}\n for (const lock of locks) {\n for (const [name, info] of Object.entries(lock.skills)) {\n const existing = merged[name]\n if (!existing || (info.syncedAt && (!existing.syncedAt || info.syncedAt > existing.syncedAt)))\n merged[name] = info\n }\n }\n return { skills: merged }\n}\n\n/**\n * Sync a lockfile to all other dirs that already have a skilld-lock.yaml.\n * Only updates existing lockfiles — does not create new ones.\n */\nexport function syncLockfilesToDirs(sourceLock: SkilldLock, dirs: string[]): void {\n for (const dir of dirs) {\n const lockPath = join(dir, 'skilld-lock.yaml')\n if (!existsSync(lockPath))\n continue\n const existing = readLock(dir)\n if (!existing)\n continue\n // Merge source into existing\n const merged = mergeLocks([existing, sourceLock])\n writeFileSync(lockPath, serializeLock(merged))\n invalidateLockCache(dir)\n }\n}\n\nexport function removeLockEntry(skillsDir: string, skillName: string): void {\n const lockPath = join(skillsDir, 'skilld-lock.yaml')\n const lock = readLock(skillsDir)\n if (!lock)\n return\n\n delete lock.skills[skillName]\n\n if (Object.keys(lock.skills).length === 0) {\n unlinkSync(lockPath)\n invalidateLockCache(skillsDir)\n return\n }\n\n writeFileSync(lockPath, serializeLock(lock))\n 
invalidateLockCache(skillsDir)\n}\n"],"mappings":";;;;;AAsBA,SAAgB,cAAc,UAA6D;AACzF,KAAI,CAAC,SACH,QAAO,EAAE;AACX,QAAO,SAAS,MAAM,IAAI,CAAC,KAAK,MAAM;EACpC,MAAM,UAAU,EAAE,MAAM;EACxB,MAAM,QAAQ,QAAQ,YAAY,IAAI;AACtC,MAAI,SAAS,EACX,QAAO;GAAE,MAAM;GAAS,SAAS;GAAI;AACvC,SAAO;GAAE,MAAM,QAAQ,MAAM,GAAG,MAAM;GAAE,SAAS,QAAQ,MAAM,QAAQ,EAAA;GAAI;GAC3E,CAAC,QAAO,MAAK,EAAE,KAAK;;AAGxB,SAAgB,kBAAkB,MAAwD;AACxF,QAAO,KAAK,KAAI,MAAK,GAAG,EAAE,KAAK,GAAG,EAAE,UAAU,CAAC,KAAK,KAAK;;AAO3D,MAAM,gBAAqC;CAAC;CAAe;CAAW;CAAY;CAAQ;CAAU;CAAY;CAAa;CAAQ;CAAO;CAAS;AAErJ,SAAS,eAAe,KAAqC;AAC3D,QAAQ,cAAoC,SAAS,IAAI;;AAG3D,SAAgB,sBAAsB,WAAqC;AACzE,KAAI,CAAC,WAAW,UAAU,CACxB,QAAO;CAET,MAAM,KAAK,iBADK,aAAa,WAAW,QAAQ,CACZ;AACpC,KAAI,OAAO,KAAK,GAAG,CAAC,WAAW,EAC7B,QAAO;CAET,MAAM,OAAkB,EAAE;AAC1B,MAAK,MAAM,OAAO,cAChB,KAAI,GAAG,KACL,MAAK,OAAO,GAAG;AAEnB,QAAO;;AAGT,MAAM,4BAAY,IAAI,KAAyB;AAE/C,SAAgB,oBAAoB,WAA0B;AAC5D,KAAI,UACF,WAAU,OAAO,UAAU;KAE3B,WAAU,OAAO;;AAGrB,SAAgB,SAAS,WAAsC;CAC7D,MAAM,SAAS,UAAU,IAAI,UAAU;AACvC,KAAI,OACF,QAAO;CACT,MAAM,WAAW,KAAK,WAAW,mBAAmB;AACpD,KAAI,CAAC,WAAW,SAAS,CACvB,QAAO;CACT,MAAM,UAAU,aAAa,UAAU,QAAQ;CAE/C,MAAM,SAAoC,EAAE;CAC5C,IAAI,eAA8B;AAElC,MAAK,MAAM,QAAQ,QAAQ,MAAM,KAAK,EAAE;EACtC,MAAM,aAAa,KAAK,MAAM,eAAe;AAC7C,MAAI,YAAY;AACd,kBAAe,WAAW;AAC1B,UAAO,gBAAgB,EAAE;AACzB;;AAEF,MAAI,gBAAgB,KAAK,WAAW,OAAO,EAAE;GAC3C,MAAM,KAAK,YAAY,KAAK;AAC5B,OAAI,MAAM,eAAe,GAAG,GAAG,CAC7B,QAAO,cAAe,GAAG,MAAM,GAAG;;;CAGxC,MAAM,OAAO,EAAE,QAAQ;AACvB,WAAU,IAAI,WAAW,KAAK;AAC9B,QAAO,EAAE,QAAQ,EAAE,GAAG,KAAK,QAAQ,EAAE;;AAGvC,SAAS,cAAc,MAA0B;CAC/C,IAAI,OAAO;AACX,MAAK,MAAM,CAAC,MAAM,UAAU,OAAO,QAAQ,KAAK,OAAO,EAAE;AACvD,UAAQ,KAAK,KAAK;AAClB,OAAK,MAAM,OAAO,cAChB,KAAI,MAAM,KACR,SAAQ,OAAO,IAAI,IAAI,WAAW,MAAM,KAAK,CAAC;;AAGpD,QAAO;;AAGT,SAAgB,UAAU,WAAmB,WAAmB,MAAuB;CACrF,MAAM,WAAW,KAAK,WAAW,mBAAmB;CACpD,IAAI,OAAmB,EAAE,QAAQ,EAAE,EAAE;AACrC,KAAI,WAAW,SAAS,CACtB,QAAO,SAAS,UAAU,IAAI,EAAE,QAAQ,EAAE,EAAE;CAG9C,MAAM,WAAW,KAAK,OAAO;AAC7B,KAAI,YAAY,KAAK,aAAa;EAEhC,MAAM,eAAe,cAAc,SAAS,SAAS;AAErD,MAAI,SAAS,eAAe,CAAC,aAAa,MAAK,MAAK,EAAE,SAAS,SAAS,YAAY,CAClF,cAAa,QAAQ;GAAE,MAAM,SAAS;GAAa,SAAS,SAAS,WAAW;GAAI,CAAC;EAGvF,MAAM,MAAM,aAAa,WAAU,MAAK,EAAE,SAAS,KAAK,YAAY;AACpE,MAAI,OAAO,EACT,cAAa,KAAM,UAAU,KAAK,WAAW;MAG7C,cAAa,KAAK;GAAE,MAAM,KAAK;GAAa,SAAS,KAAK,WAAW;GAAI,CAAC;AAE5E,OAAK,WAAW,kBAAkB,aAAa;AAE/C,OAAK,cAAc,aAAa,GAAI;AACpC,OAAK,UAAU,aAAa,GAAI;AAEhC,MAAI,CAAC,KAAK,QAAQ,SAAS,KACzB,MAAK,OAAO,SAAS;AACvB,MAAI,CAAC,KAAK,UAAU,SAAS,OAC3B,MAAK,SAAS,SAAS;AACzB,MAAI,CAAC,KAAK,aAAa,SAAS,UAC9B,MAAK,YAAY,SAAS;;AAG9B,MAAK,OAAO,aAAa;AACzB,eAAc,UAAU,cAAc,KAAK,CAAC;AAC5C,qBAAoB,UAAU;;;;;AAMhC,SAAgB,WAAW,OAAiC;CAC1D,MAAM,SAAoC,EAAE;AAC5C,MAAK,MAAM,QAAQ,MACjB,MAAK,MAAM,CAAC,MAAM,SAAS,OAAO,QAAQ,KAAK,OAAO,EAAE;EACtD,MAAM,WAAW,OAAO;AACxB,MAAI,CAAC,YAAa,KAAK,aAAa,CAAC,SAAS,YAAY,KAAK,WAAW,SAAS,UACjF,QAAO,QAAQ;;AAGrB,QAAO,EAAE,QAAQ,QAAQ;;;;;;AAO3B,SAAgB,oBAAoB,YAAwB,MAAsB;AAChF,MAAK,MAAM,OAAO,MAAM;EACtB,MAAM,WAAW,KAAK,KAAK,mBAAmB;AAC9C,MAAI,CAAC,WAAW,SAAS,CACvB;EACF,MAAM,WAAW,SAAS,IAAI;AAC9B,MAAI,CAAC,SACH;AAGF,gBAAc,UAAU,cADT,WAAW,CAAC,UAAU,WAAW,CAAC,CACJ,CAAC;AAC9C,sBAAoB,IAAI;;;AAI5B,SAAgB,gBAAgB,WAAmB,WAAyB;CAC1E,MAAM,WAAW,KAAK,WAAW,mBAAmB;CACpD,MAAM,OAAO,SAAS,UAAU;AAChC,KAAI,CAAC,KACH;AAEF,QAAO,KAAK,OAAO;AAEnB,KAAI,OAAO,KAAK,KAAK,OAAO,CAAC,WAAW,GAAG;AACzC,aAAW,SAAS;AACpB,sBAAoB,UAAU;AAC9B;;AAGF,eAAc,UAAU,cAAc,KAAK,CAAC;AAC5C,qBAAoB,UAAU"}
+ {"version":3,"file":"lockfile.mjs","names":[],"sources":["../../src/core/lockfile.ts"],"sourcesContent":["import { existsSync, readFileSync, unlinkSync, writeFileSync } from 'node:fs'\nimport { join } from 'pathe'\nimport { parseFrontmatter } from './markdown.ts'\nimport { yamlEscape, yamlParseKV } from './yaml.ts'\n\nexport interface SkillInfo {\n packageName?: string\n version?: string\n /** All tracked packages as comma-separated \"name@version\" pairs (multi-package skills) */\n packages?: string\n repo?: string\n source?: string\n syncedAt?: string\n generator?: string\n /** Skill path within repo (git-sourced skills) */\n path?: string\n /** Git ref tracked for updates */\n ref?: string\n /** Git commit SHA at install time */\n commit?: string\n}\n\nexport function parsePackages(packages?: string): Array<{ name: string, version: string }> {\n if (!packages)\n return []\n return packages.split(',').map((s) => {\n const trimmed = s.trim()\n const atIdx = trimmed.lastIndexOf('@')\n if (atIdx <= 0)\n return { name: trimmed, version: '' }\n return { name: trimmed.slice(0, atIdx), version: trimmed.slice(atIdx + 1) }\n }).filter(p => p.name)\n}\n\nexport function serializePackages(pkgs: Array<{ name: string, version: string }>): string {\n return pkgs.map(p => `${p.name}@${p.version}`).join(', ')\n}\n\nexport interface SkilldLock {\n skills: Record<string, SkillInfo>\n}\n\nconst SKILL_FM_KEYS: (keyof SkillInfo)[] = ['packageName', 'version', 'packages', 'repo', 'source', 'syncedAt', 'generator', 'path', 'ref', 'commit']\n\nfunction isSkillInfoKey(key: string): key is keyof SkillInfo {\n return (SKILL_FM_KEYS as readonly string[]).includes(key)\n}\n\nexport function parseSkillFrontmatter(skillPath: string): SkillInfo | null {\n if (!existsSync(skillPath))\n return null\n const content = readFileSync(skillPath, 'utf-8')\n const fm = parseFrontmatter(content)\n if (Object.keys(fm).length === 0)\n return null\n\n const info: SkillInfo = {}\n for (const key of SKILL_FM_KEYS) {\n if (fm[key])\n info[key] = fm[key]\n }\n return info\n}\n\nconst lockCache = new Map<string, SkilldLock>()\n\nexport function invalidateLockCache(skillsDir?: string): void {\n if (skillsDir)\n lockCache.delete(skillsDir)\n else\n lockCache.clear()\n}\n\nexport function readLock(skillsDir: string): SkilldLock | null {\n const cached = lockCache.get(skillsDir)\n if (cached)\n return { skills: { ...cached.skills } }\n const lockPath = join(skillsDir, 'skilld-lock.yaml')\n if (!existsSync(lockPath))\n return null\n const content = readFileSync(lockPath, 'utf-8')\n\n const skills: Record<string, SkillInfo> = {}\n let currentSkill: string | null = null\n\n for (const line of content.split('\\n')) {\n const skillMatch = line.match(/^ {2}(\\S+):$/)\n if (skillMatch) {\n currentSkill = skillMatch[1]!\n skills[currentSkill] = {}\n continue\n }\n if (currentSkill && line.startsWith(' ')) {\n const kv = yamlParseKV(line)\n if (kv && isSkillInfoKey(kv[0]))\n skills[currentSkill]![kv[0]] = kv[1]\n }\n }\n // Normalize legacy source values\n for (const info of Object.values(skills)) {\n if (info.source === 'npm')\n info.source = 'registry'\n }\n const lock = { skills }\n lockCache.set(skillsDir, lock)\n return { skills: { ...lock.skills } }\n}\n\nfunction serializeLock(lock: SkilldLock): string {\n let yaml = 'skills:\\n'\n for (const [name, skill] of Object.entries(lock.skills)) {\n yaml += ` ${name}:\\n`\n for (const key of SKILL_FM_KEYS) {\n if (skill[key])\n yaml += ` ${key}: ${yamlEscape(skill[key])}\\n`\n }\n }\n return 
yaml\n}\n\nexport function writeLock(skillsDir: string, skillName: string, info: SkillInfo): void {\n const lockPath = join(skillsDir, 'skilld-lock.yaml')\n let lock: SkilldLock = { skills: {} }\n if (existsSync(lockPath)) {\n lock = readLock(skillsDir) || { skills: {} }\n }\n\n const existing = lock.skills[skillName]\n if (existing && info.packageName) {\n // Merge packages list\n const existingPkgs = parsePackages(existing.packages)\n // Also include existing primary if not yet in packages list\n if (existing.packageName && !existingPkgs.some(p => p.name === existing.packageName)) {\n existingPkgs.unshift({ name: existing.packageName, version: existing.version || '' })\n }\n // Add/update new package\n const idx = existingPkgs.findIndex(p => p.name === info.packageName)\n if (idx >= 0) {\n existingPkgs[idx]!.version = info.version || ''\n }\n else {\n existingPkgs.push({ name: info.packageName, version: info.version || '' })\n }\n info.packages = serializePackages(existingPkgs)\n // Keep primary as first package\n info.packageName = existingPkgs[0]!.name\n info.version = existingPkgs[0]!.version\n // Preserve fields from existing entry that aren't in new info\n if (!info.repo && existing.repo)\n info.repo = existing.repo\n if (!info.source && existing.source)\n info.source = existing.source\n if (!info.generator && existing.generator)\n info.generator = existing.generator\n }\n\n lock.skills[skillName] = info\n writeFileSync(lockPath, serializeLock(lock))\n invalidateLockCache(skillsDir)\n}\n\n/**\n * Merge multiple lockfiles, preferring the most recently synced entry per skill.\n */\nexport function mergeLocks(locks: SkilldLock[]): SkilldLock {\n const merged: Record<string, SkillInfo> = {}\n for (const lock of locks) {\n for (const [name, info] of Object.entries(lock.skills)) {\n const existing = merged[name]\n if (!existing || (info.syncedAt && (!existing.syncedAt || info.syncedAt > existing.syncedAt)))\n merged[name] = info\n }\n }\n return { skills: merged }\n}\n\n/**\n * Sync a lockfile to all other dirs that already have a skilld-lock.yaml.\n * Only updates existing lockfiles — does not create new ones.\n */\nexport function syncLockfilesToDirs(sourceLock: SkilldLock, dirs: string[]): void {\n for (const dir of dirs) {\n const lockPath = join(dir, 'skilld-lock.yaml')\n if (!existsSync(lockPath))\n continue\n const existing = readLock(dir)\n if (!existing)\n continue\n // Merge source into existing\n const merged = mergeLocks([existing, sourceLock])\n writeFileSync(lockPath, serializeLock(merged))\n invalidateLockCache(dir)\n }\n}\n\nexport function removeLockEntry(skillsDir: string, skillName: string): void {\n const lockPath = join(skillsDir, 'skilld-lock.yaml')\n const lock = readLock(skillsDir)\n if (!lock)\n return\n\n delete lock.skills[skillName]\n\n if (Object.keys(lock.skills).length === 0) {\n unlinkSync(lockPath)\n invalidateLockCache(skillsDir)\n return\n }\n\n writeFileSync(lockPath, serializeLock(lock))\n 
invalidateLockCache(skillsDir)\n}\n"],"mappings":";;;;AAsBA,SAAgB,cAAc,UAA6D;AACzF,KAAI,CAAC,SACH,QAAO,EAAE;AACX,QAAO,SAAS,MAAM,IAAI,CAAC,KAAK,MAAM;EACpC,MAAM,UAAU,EAAE,MAAM;EACxB,MAAM,QAAQ,QAAQ,YAAY,IAAI;AACtC,MAAI,SAAS,EACX,QAAO;GAAE,MAAM;GAAS,SAAS;GAAI;AACvC,SAAO;GAAE,MAAM,QAAQ,MAAM,GAAG,MAAM;GAAE,SAAS,QAAQ,MAAM,QAAQ,EAAA;GAAI;GAC3E,CAAC,QAAO,MAAK,EAAE,KAAK;;AAGxB,SAAgB,kBAAkB,MAAwD;AACxF,QAAO,KAAK,KAAI,MAAK,GAAG,EAAE,KAAK,GAAG,EAAE,UAAU,CAAC,KAAK,KAAK;;AAO3D,MAAM,gBAAqC;CAAC;CAAe;CAAW;CAAY;CAAQ;CAAU;CAAY;CAAa;CAAQ;CAAO;CAAS;AAErJ,SAAS,eAAe,KAAqC;AAC3D,QAAQ,cAAoC,SAAS,IAAI;;AAG3D,SAAgB,sBAAsB,WAAqC;AACzE,KAAI,CAAC,WAAW,UAAU,CACxB,QAAO;CAET,MAAM,KAAK,iBADK,aAAa,WAAW,QAAQ,CACZ;AACpC,KAAI,OAAO,KAAK,GAAG,CAAC,WAAW,EAC7B,QAAO;CAET,MAAM,OAAkB,EAAE;AAC1B,MAAK,MAAM,OAAO,cAChB,KAAI,GAAG,KACL,MAAK,OAAO,GAAG;AAEnB,QAAO;;AAGT,MAAM,4BAAY,IAAI,KAAyB;AAE/C,SAAgB,oBAAoB,WAA0B;AAC5D,KAAI,UACF,WAAU,OAAO,UAAU;KAE3B,WAAU,OAAO;;AAGrB,SAAgB,SAAS,WAAsC;CAC7D,MAAM,SAAS,UAAU,IAAI,UAAU;AACvC,KAAI,OACF,QAAO,EAAE,QAAQ,EAAE,GAAG,OAAO,QAAQ,EAAE;CACzC,MAAM,WAAW,KAAK,WAAW,mBAAmB;AACpD,KAAI,CAAC,WAAW,SAAS,CACvB,QAAO;CACT,MAAM,UAAU,aAAa,UAAU,QAAQ;CAE/C,MAAM,SAAoC,EAAE;CAC5C,IAAI,eAA8B;AAElC,MAAK,MAAM,QAAQ,QAAQ,MAAM,KAAK,EAAE;EACtC,MAAM,aAAa,KAAK,MAAM,eAAe;AAC7C,MAAI,YAAY;AACd,kBAAe,WAAW;AAC1B,UAAO,gBAAgB,EAAE;AACzB;;AAEF,MAAI,gBAAgB,KAAK,WAAW,OAAO,EAAE;GAC3C,MAAM,KAAK,YAAY,KAAK;AAC5B,OAAI,MAAM,eAAe,GAAG,GAAG,CAC7B,QAAO,cAAe,GAAG,MAAM,GAAG;;;AAIxC,MAAK,MAAM,QAAQ,OAAO,OAAO,OAAO,CACtC,KAAI,KAAK,WAAW,MAClB,MAAK,SAAS;CAElB,MAAM,OAAO,EAAE,QAAQ;AACvB,WAAU,IAAI,WAAW,KAAK;AAC9B,QAAO,EAAE,QAAQ,EAAE,GAAG,KAAK,QAAQ,EAAE;;AAGvC,SAAS,cAAc,MAA0B;CAC/C,IAAI,OAAO;AACX,MAAK,MAAM,CAAC,MAAM,UAAU,OAAO,QAAQ,KAAK,OAAO,EAAE;AACvD,UAAQ,KAAK,KAAK;AAClB,OAAK,MAAM,OAAO,cAChB,KAAI,MAAM,KACR,SAAQ,OAAO,IAAI,IAAI,WAAW,MAAM,KAAK,CAAC;;AAGpD,QAAO;;AAGT,SAAgB,UAAU,WAAmB,WAAmB,MAAuB;CACrF,MAAM,WAAW,KAAK,WAAW,mBAAmB;CACpD,IAAI,OAAmB,EAAE,QAAQ,EAAE,EAAE;AACrC,KAAI,WAAW,SAAS,CACtB,QAAO,SAAS,UAAU,IAAI,EAAE,QAAQ,EAAE,EAAE;CAG9C,MAAM,WAAW,KAAK,OAAO;AAC7B,KAAI,YAAY,KAAK,aAAa;EAEhC,MAAM,eAAe,cAAc,SAAS,SAAS;AAErD,MAAI,SAAS,eAAe,CAAC,aAAa,MAAK,MAAK,EAAE,SAAS,SAAS,YAAY,CAClF,cAAa,QAAQ;GAAE,MAAM,SAAS;GAAa,SAAS,SAAS,WAAW;GAAI,CAAC;EAGvF,MAAM,MAAM,aAAa,WAAU,MAAK,EAAE,SAAS,KAAK,YAAY;AACpE,MAAI,OAAO,EACT,cAAa,KAAM,UAAU,KAAK,WAAW;MAG7C,cAAa,KAAK;GAAE,MAAM,KAAK;GAAa,SAAS,KAAK,WAAW;GAAI,CAAC;AAE5E,OAAK,WAAW,kBAAkB,aAAa;AAE/C,OAAK,cAAc,aAAa,GAAI;AACpC,OAAK,UAAU,aAAa,GAAI;AAEhC,MAAI,CAAC,KAAK,QAAQ,SAAS,KACzB,MAAK,OAAO,SAAS;AACvB,MAAI,CAAC,KAAK,UAAU,SAAS,OAC3B,MAAK,SAAS,SAAS;AACzB,MAAI,CAAC,KAAK,aAAa,SAAS,UAC9B,MAAK,YAAY,SAAS;;AAG9B,MAAK,OAAO,aAAa;AACzB,eAAc,UAAU,cAAc,KAAK,CAAC;AAC5C,qBAAoB,UAAU;;;;AAMhC,MAAA,MAAgB,QAAW,MAAiC,MAAA,MAAA,CAAA,MAAA,SAAA,OAAA,QAAA,KAAA,OAAA,EAAA;EAC1D,MAAM,WAAsC,OAAA;AAC5C,MAAK,CAAA,YAAM,KAAQ,aACZ,CAAM,SAAO,YAAS,KAAO,WAAa,SAAS,UAAA,QAAA,QAAA;;AAEtD,QAAK,EAAA,QAAA,QAAkB;;;;;;;AAW7B,MAAA,CAAA,SAAgB;AACd,gBAAW,UAAa,cAAA,WAAA,CAAA,UAAA,WAAA,CAAA,CAAA,CAAA;AACtB,sBAAiB,IAAK;;;AAItB,SAAK,gBACH,WAAA,WAAA;CAGF,MAAA,WAAc,KAAA,WAAU,mBADG;CAE3B,MAAA,OAAA,SAAoB,UAAI;;;AAI5B,KAAA,OAAgB,KAAA,KAAA,OAAgB,CAAA,WAAmB,GAAA;AACjD,aAAM,SAAW;AACjB,sBAAa,UAAS;AACtB;;AAKA,eAAW,UAAU,cAAQ,KAAc,CAAA;AACzC,qBAAW,UAAS"}
package/dist/_chunks/markdown.mjs

@@ -4,8 +4,6 @@ import { frontmatterFromMarkdown } from "mdast-util-frontmatter";
  import { toString } from "mdast-util-to-string";
  import { frontmatter } from "micromark-extension-frontmatter";
  import { visit } from "unist-util-visit";
- //#region src/core/markdown.ts
- /** Parse markdown string to AST + frontmatter key-values */
  function parseMd(content) {
  const tree = fromMarkdown(content, {
  extensions: [frontmatter(["yaml"])],
@@ -23,15 +21,12 @@ function parseMd(content) {
  frontmatter: fm
  };
  }
- /** Extract frontmatter key-value pairs only */
  function parseFrontmatter(content) {
  return parseMd(content).frontmatter;
  }
- /** Strip custom heading anchors like {#some-id} */
  function stripHeadingAnchors(text) {
  return text.replace(/\s*\{#[^}]+\}\s*$/, "").trim();
  }
- /** Extract title: frontmatter title > first h1 > null */
  function extractTitle(content) {
  const { tree, frontmatter: fm } = parseMd(content);
  if (fm.title) return fm.title;
@@ -44,7 +39,6 @@ function extractTitle(content) {
  });
  return title;
  }
- /** Extract first paragraph text, 150 char max */
  function extractDescription(content) {
  const { tree } = parseMd(content);
  let desc = null;
@@ -58,7 +52,6 @@ function extractDescription(content) {
  });
  return desc;
  }
- /** Extract all links (deduped by url) */
  function extractLinks(content) {
  const { tree } = parseMd(content);
  const links = [];
@@ -74,12 +67,10 @@ function extractLinks(content) {
  });
  return links;
  }
- /** Strip frontmatter block, return body only */
  function stripFrontmatter(content) {
  const match = content.match(/^---\r?\n[\s\S]*?\r?\n---\r?\n/);
  return match ? content.slice(match[0].length).trim() : content;
  }
- //#endregion
  export { stripFrontmatter as a, parseFrontmatter as i, extractLinks as n, extractTitle as r, extractDescription as t };

  //# sourceMappingURL=markdown.mjs.map
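
This hunk strips JSDoc comments and region markers only; no behavior changes. The helpers' contracts are spelled out in the sourcesContent of the map that follows: `extractTitle` prefers a frontmatter `title` over the first h1, `extractDescription` takes the first top-level paragraph clipped to 150 characters (with link syntax flattened to its text), and `extractLinks` dedupes by URL. A usage sketch under those contracts (the sample document is invented):

  // Sketch against the source embedded in markdown.mjs.map below.
  const md = '---\ntitle: My Skill\n---\n\n# Heading\n\nDoes a thing. See [docs](https://example.com).'
  extractTitle(md)       // 'My Skill' (frontmatter wins over the h1)
  extractDescription(md) // 'Does a thing. See docs.'
  extractLinks(md)       // [{ title: 'docs', url: 'https://example.com' }]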
package/dist/_chunks/markdown.mjs.map

@@ -1 +1 @@
- {"version":3,"file":"markdown.mjs","names":[],"sources":["../../src/core/markdown.ts"],"sourcesContent":["/**\n * AST-based markdown parsing using mdast/micromark.\n * Replaces scattered regex-based frontmatter/heading/link extraction.\n */\n\nimport type { Nodes, Root } from 'mdast'\nimport { fromMarkdown } from 'mdast-util-from-markdown'\nimport { frontmatterFromMarkdown } from 'mdast-util-frontmatter'\nimport { toString } from 'mdast-util-to-string'\nimport { frontmatter } from 'micromark-extension-frontmatter'\nimport { visit } from 'unist-util-visit'\nimport { yamlParseKV } from './yaml.ts'\n\nexport interface MdHeading {\n depth: number\n text: string\n}\n\nexport interface MdLink {\n title: string\n url: string\n}\n\nexport interface ParsedMd {\n tree: Root\n frontmatter: Record<string, string>\n}\n\n/** Parse markdown string to AST + frontmatter key-values */\nexport function parseMd(content: string): ParsedMd {\n const tree = fromMarkdown(content, {\n extensions: [frontmatter(['yaml'])],\n mdastExtensions: [frontmatterFromMarkdown(['yaml'])],\n })\n\n const fm: Record<string, string> = {}\n visit(tree, 'yaml', (node: Nodes) => {\n if (node.type === 'yaml') {\n for (const line of (node as any).value.split('\\n')) {\n const kv = yamlParseKV(line)\n if (kv)\n fm[kv[0]] = kv[1]\n }\n }\n })\n\n return { tree, frontmatter: fm }\n}\n\n/** Extract frontmatter key-value pairs only */\nexport function parseFrontmatter(content: string): Record<string, string> {\n return parseMd(content).frontmatter\n}\n\n/** Strip custom heading anchors like {#some-id} */\nfunction stripHeadingAnchors(text: string): string {\n return text.replace(/\\s*\\{#[^}]+\\}\\s*$/, '').trim()\n}\n\n/** Extract title: frontmatter title > first h1 > null */\nexport function extractTitle(content: string): string | null {\n const { tree, frontmatter: fm } = parseMd(content)\n if (fm.title)\n return fm.title\n\n let title: string | null = null\n visit(tree, 'heading', (node) => {\n if (node.depth === 1 && !title) {\n // Strip {#id} anchors and leading backslash escapes (e.g. 
`# \\`)\n const text = stripHeadingAnchors(toString(node)).replace(/^\\\\+\\s*/, '').trim()\n if (text.length > 0)\n title = text\n }\n })\n\n return title\n}\n\n/** Extract first paragraph text, 150 char max */\nexport function extractDescription(content: string): string | null {\n const { tree } = parseMd(content)\n\n let desc: string | null = null\n visit(tree, 'paragraph', (node, _index, parent) => {\n // Only top-level paragraphs (skip blockquote children, list items, etc.)\n if (desc || parent?.type !== 'root')\n return\n\n const text = toString(node).trim()\n if (text.length === 0)\n return\n\n // Strip markdown link syntax remnants and formatting chars\n let clean = text.replace(/\\[([^\\]]+)\\]\\([^)]+\\)/g, '$1').replace(/[`*_~]/g, '')\n if (clean.length > 150)\n clean = `${clean.slice(0, 147)}...`\n desc = clean\n })\n\n return desc\n}\n\n/** Extract all headings with depth and text */\nexport function extractHeadings(content: string): MdHeading[] {\n const { tree } = parseMd(content)\n const headings: MdHeading[] = []\n\n visit(tree, 'heading', (node) => {\n headings.push({ depth: node.depth, text: stripHeadingAnchors(toString(node)) })\n })\n\n return headings\n}\n\n/** Extract all links (deduped by url) */\nexport function extractLinks(content: string): MdLink[] {\n const { tree } = parseMd(content)\n const links: MdLink[] = []\n const seen = new Set<string>()\n\n visit(tree, 'link', (node) => {\n if (!seen.has(node.url)) {\n seen.add(node.url)\n links.push({ title: toString(node), url: node.url })\n }\n })\n\n return links\n}\n\n/** Strip frontmatter block, return body only */\nexport function stripFrontmatter(content: string): string {\n const match = content.match(/^---\\r?\\n[\\s\\S]*?\\r?\\n---\\r?\\n/)\n return match ? content.slice(match[0].length).trim() : content\n}\n"],"mappings":";;;;;;;;AA6BA,SAAgB,QAAQ,SAA2B;CACjD,MAAM,OAAO,aAAa,SAAS;EACjC,YAAY,CAAC,YAAY,CAAC,OAAO,CAAC,CAAC;EACnC,iBAAiB,CAAC,wBAAwB,CAAC,OAAO,CAAC,CAAA;EACpD,CAAC;CAEF,MAAM,KAA6B,EAAE;AACrC,OAAM,MAAM,SAAS,SAAgB;AACnC,MAAI,KAAK,SAAS,OAChB,MAAK,MAAM,QAAS,KAAa,MAAM,MAAM,KAAK,EAAE;GAClD,MAAM,KAAK,YAAY,KAAK;AAC5B,OAAI,GACF,IAAG,GAAG,MAAM,GAAG;;GAGrB;AAEF,QAAO;EAAE;EAAM,aAAa;EAAI;;;AAIlC,SAAgB,iBAAiB,SAAyC;AACxE,QAAO,QAAQ,QAAQ,CAAC;;;AAI1B,SAAS,oBAAoB,MAAsB;AACjD,QAAO,KAAK,QAAQ,qBAAqB,GAAG,CAAC,MAAM;;;AAIrD,SAAgB,aAAa,SAAgC;CAC3D,MAAM,EAAE,MAAM,aAAa,OAAO,QAAQ,QAAQ;AAClD,KAAI,GAAG,MACL,QAAO,GAAG;CAEZ,IAAI,QAAuB;AAC3B,OAAM,MAAM,YAAY,SAAS;AAC/B,MAAI,KAAK,UAAU,KAAK,CAAC,OAAO;GAE9B,MAAM,OAAO,oBAAoB,SAAS,KAAK,CAAC,CAAC,QAAQ,WAAW,GAAG,CAAC,MAAM;AAC9E,OAAI,KAAK,SAAS,EAChB,SAAQ;;GAEZ;AAEF,QAAO;;;AAIT,SAAgB,mBAAmB,SAAgC;CACjE,MAAM,EAAE,SAAS,QAAQ,QAAQ;CAEjC,IAAI,OAAsB;AAC1B,OAAM,MAAM,cAAc,MAAM,QAAQ,WAAW;AAEjD,MAAI,QAAQ,QAAQ,SAAS,OAC3B;EAEF,MAAM,OAAO,SAAS,KAAK,CAAC,MAAM;AAClC,MAAI,KAAK,WAAW,EAClB;EAGF,IAAI,QAAQ,KAAK,QAAQ,0BAA0B,KAAK,CAAC,QAAQ,WAAW,GAAG;AAC/E,MAAI,MAAM,SAAS,IACjB,SAAQ,GAAG,MAAM,MAAM,GAAG,IAAI,CAAC;AACjC,SAAO;GACP;AAEF,QAAO;;;AAgBT,SAAgB,aAAa,SAA2B;CACtD,MAAM,EAAE,SAAS,QAAQ,QAAQ;CACjC,MAAM,QAAkB,EAAE;CAC1B,MAAM,uBAAO,IAAI,KAAa;AAE9B,OAAM,MAAM,SAAS,SAAS;AAC5B,MAAI,CAAC,KAAK,IAAI,KAAK,IAAI,EAAE;AACvB,QAAK,IAAI,KAAK,IAAI;AAClB,SAAM,KAAK;IAAE,OAAO,SAAS,KAAK;IAAE,KAAK,KAAK;IAAK,CAAC;;GAEtD;AAEF,QAAO;;;AAIT,SAAgB,iBAAiB,SAAyB;CACxD,MAAM,QAAQ,QAAQ,MAAM,iCAAiC;AAC7D,QAAO,QAAQ,QAAQ,MAAM,MAAM,GAAG,OAAO,CAAC,MAAM,GAAG"}
+ {"version":3,"file":"markdown.mjs","names":[],"sources":["../../src/core/markdown.ts"],"sourcesContent":["/**\n * AST-based markdown parsing using mdast/micromark.\n * Replaces scattered regex-based frontmatter/heading/link extraction.\n */\n\nimport type { Nodes, Root } from 'mdast'\nimport { fromMarkdown } from 'mdast-util-from-markdown'\nimport { frontmatterFromMarkdown } from 'mdast-util-frontmatter'\nimport { toString } from 'mdast-util-to-string'\nimport { frontmatter } from 'micromark-extension-frontmatter'\nimport { visit } from 'unist-util-visit'\nimport { yamlParseKV } from './yaml.ts'\n\nexport interface MdHeading {\n depth: number\n text: string\n}\n\nexport interface MdLink {\n title: string\n url: string\n}\n\nexport interface ParsedMd {\n tree: Root\n frontmatter: Record<string, string>\n}\n\n/** Parse markdown string to AST + frontmatter key-values */\nexport function parseMd(content: string): ParsedMd {\n const tree = fromMarkdown(content, {\n extensions: [frontmatter(['yaml'])],\n mdastExtensions: [frontmatterFromMarkdown(['yaml'])],\n })\n\n const fm: Record<string, string> = {}\n visit(tree, 'yaml', (node: Nodes) => {\n if (node.type === 'yaml') {\n for (const line of (node as any).value.split('\\n')) {\n const kv = yamlParseKV(line)\n if (kv)\n fm[kv[0]] = kv[1]\n }\n }\n })\n\n return { tree, frontmatter: fm }\n}\n\n/** Extract frontmatter key-value pairs only */\nexport function parseFrontmatter(content: string): Record<string, string> {\n return parseMd(content).frontmatter\n}\n\n/** Strip custom heading anchors like {#some-id} */\nfunction stripHeadingAnchors(text: string): string {\n return text.replace(/\\s*\\{#[^}]+\\}\\s*$/, '').trim()\n}\n\n/** Extract title: frontmatter title > first h1 > null */\nexport function extractTitle(content: string): string | null {\n const { tree, frontmatter: fm } = parseMd(content)\n if (fm.title)\n return fm.title\n\n let title: string | null = null\n visit(tree, 'heading', (node) => {\n if (node.depth === 1 && !title) {\n // Strip {#id} anchors and leading backslash escapes (e.g. 
`# \\`)\n const text = stripHeadingAnchors(toString(node)).replace(/^\\\\+\\s*/, '').trim()\n if (text.length > 0)\n title = text\n }\n })\n\n return title\n}\n\n/** Extract first paragraph text, 150 char max */\nexport function extractDescription(content: string): string | null {\n const { tree } = parseMd(content)\n\n let desc: string | null = null\n visit(tree, 'paragraph', (node, _index, parent) => {\n // Only top-level paragraphs (skip blockquote children, list items, etc.)\n if (desc || parent?.type !== 'root')\n return\n\n const text = toString(node).trim()\n if (text.length === 0)\n return\n\n // Strip markdown link syntax remnants and formatting chars\n let clean = text.replace(/\\[([^\\]]+)\\]\\([^)]+\\)/g, '$1').replace(/[`*_~]/g, '')\n if (clean.length > 150)\n clean = `${clean.slice(0, 147)}...`\n desc = clean\n })\n\n return desc\n}\n\n/** Extract all headings with depth and text */\nexport function extractHeadings(content: string): MdHeading[] {\n const { tree } = parseMd(content)\n const headings: MdHeading[] = []\n\n visit(tree, 'heading', (node) => {\n headings.push({ depth: node.depth, text: stripHeadingAnchors(toString(node)) })\n })\n\n return headings\n}\n\n/** Extract all links (deduped by url) */\nexport function extractLinks(content: string): MdLink[] {\n const { tree } = parseMd(content)\n const links: MdLink[] = []\n const seen = new Set<string>()\n\n visit(tree, 'link', (node) => {\n if (!seen.has(node.url)) {\n seen.add(node.url)\n links.push({ title: toString(node), url: node.url })\n }\n })\n\n return links\n}\n\n/** Strip frontmatter block, return body only */\nexport function stripFrontmatter(content: string): string {\n const match = content.match(/^---\\r?\\n[\\s\\S]*?\\r?\\n---\\r?\\n/)\n return match ? content.slice(match[0].length).trim() : content\n}\n"],"mappings":";;;;;;AA6BA,SAAgB,QAAQ,SAA2B;CACjD,MAAM,OAAO,aAAa,SAAS;EACjC,YAAY,CAAC,YAAY,CAAC,OAAO,CAAC,CAAC;EACnC,iBAAiB,CAAC,wBAAwB,CAAC,OAAO,CAAC,CAAA;EACpD,CAAC;CAEF,MAAM,KAA6B,EAAE;AACrC,OAAM,MAAM,SAAS,SAAgB;AACnC,MAAI,KAAK,SAAS,OAChB,MAAK,MAAM,QAAS,KAAa,MAAM,MAAM,KAAK,EAAE;GAClD,MAAM,KAAK,YAAY,KAAK;AAC5B,OAAI,GACF,IAAG,GAAG,MAAM,GAAG;;GAGrB;AAEF,QAAO;EAAE;EAAM,aAAa;EAAI;;AAIlC,SAAgB,iBAAiB,SAAyC;AACxE,QAAO,QAAQ,QAAQ,CAAC;;AAI1B,SAAS,oBAAoB,MAAsB;AACjD,QAAO,KAAK,QAAQ,qBAAqB,GAAG,CAAC,MAAM;;AAIrD,SAAgB,aAAa,SAAgC;CAC3D,MAAM,EAAE,MAAM,aAAa,OAAO,QAAQ,QAAQ;AAClD,KAAI,GAAG,MACL,QAAO,GAAG;CAEZ,IAAI,QAAuB;AAC3B,OAAM,MAAM,YAAY,SAAS;AAC/B,MAAI,KAAK,UAAU,KAAK,CAAC,OAAO;GAE9B,MAAM,OAAO,oBAAoB,SAAS,KAAK,CAAC,CAAC,QAAQ,WAAW,GAAG,CAAC,MAAM;AAC9E,OAAI,KAAK,SAAS,EAChB,SAAQ;;GAEZ;AAEF,QAAO;;AAIT,SAAgB,mBAAmB,SAAgC;CACjE,MAAM,EAAE,SAAS,QAAQ,QAAQ;CAEjC,IAAI,OAAsB;AAC1B,OAAM,MAAM,cAAc,MAAM,QAAQ,WAAW;AAEjD,MAAI,QAAQ,QAAQ,SAAS,OAC3B;EAEF,MAAM,OAAO,SAAS,KAAK,CAAC,MAAM;AAClC,MAAI,KAAK,WAAW,EAClB;EAGF,IAAI,QAAQ,KAAK,QAAQ,0BAA0B,KAAK,CAAC,QAAQ,WAAW,GAAG;AAC/E,MAAI,MAAM,SAAS,IACjB,SAAQ,GAAG,MAAM,MAAM,GAAG,IAAI,CAAC;AACjC,SAAO;GACP;AAEF,QAAO;;AAgBT,SAAgB,aAAa,SAA2B;CACtD,MAAM,EAAE,SAAS,QAAQ,QAAQ;CACjC,MAAM,QAAkB,EAAE;CAC1B,MAAM,uBAAO,IAAI,KAAa;AAE9B,OAAM,MAAM,SAAS,SAAS;AAC5B,MAAI,CAAC,KAAK,IAAI,KAAK,IAAI,EAAE;AACvB,QAAK,IAAI,KAAK,IAAI;AAClB,SAAM,KAAK;IAAE,OAAO,SAAS,KAAK;IAAE,KAAK,KAAK;IAAK,CAAC;;GAEtD;AAEF,QAAO;;AAIT,SAAgB,iBAAiB,SAAyB;CACxD,MAAM,QAAQ,QAAQ,MAAM,iCAAiC;AAC7D,QAAO,QAAQ,QAAQ,MAAM,MAAM,GAAG,OAAO,CAAC,MAAM,GAAG"}
package/dist/_chunks/package-json.mjs

@@ -1,15 +1,10 @@
  import { existsSync, readFileSync, writeFileSync } from "node:fs";
  import { applyEdits, modify, parseTree } from "jsonc-parser";
- //#region src/core/package-json.ts
  const defaultEditOptions = {
  tabSize: 2,
  insertSpaces: true
  };
  const cache = /* @__PURE__ */ new Map();
- /**
- * Read and parse a package.json, returning cached result on repeat calls.
- * Throws if the file does not exist.
- */
  function readPackageJson(pkgPath) {
  const hit = cache.get(pkgPath);
  if (hit) return hit;
@@ -21,9 +16,6 @@ function readPackageJson(pkgPath) {
  cache.set(pkgPath, entry);
  return entry;
  }
- /**
- * Same as readPackageJson but returns null when the file is missing or unparseable.
- */
  function readPackageJsonSafe(pkgPath) {
  if (cache.has(pkgPath)) return cache.get(pkgPath);
  if (!existsSync(pkgPath)) return null;
@@ -33,16 +25,9 @@ function readPackageJsonSafe(pkgPath) {
  return null;
  }
  }
- /**
- * Drop any cached entry so the next read hits disk.
- */
  function invalidatePackageJson(pkgPath) {
  cache.delete(pkgPath);
  }
- /**
- * Set a value at a JSON path, preserving all surrounding formatting.
- * Returns the modified file content as a string.
- */
  function editJsonProperty(raw, path, value, options) {
  const opts = {
  ...defaultEditOptions,
@@ -53,11 +38,6 @@ function editJsonProperty(raw, path, value, options) {
  insertSpaces: opts.insertSpaces
  } }));
  }
- /**
- * Read a package.json, apply an edit function, write it back, and invalidate the cache.
- * The edit function receives the raw text and parsed object,
- * and returns the new raw text (or null to skip writing).
- */
  function patchPackageJson(pkgPath, editFn) {
  const { raw, parsed } = readPackageJson(pkgPath);
  const result = editFn(raw, parsed);
@@ -66,10 +46,6 @@ function patchPackageJson(pkgPath, editFn) {
  invalidatePackageJson(pkgPath);
  return true;
  }
- /**
- * Append a value to a JSON array at the given path, preserving formatting.
- * Inserts in sorted order if the array contains strings.
- */
  function appendToJsonArray(raw, path, value, options) {
  const opts = {
  ...defaultEditOptions,
@@ -101,7 +77,6 @@ function appendToJsonArray(raw, path, value, options) {
  isArrayInsertion: true
  }));
  }
- //#endregion
  export { readPackageJsonSafe as i, editJsonProperty as n, patchPackageJson as r, appendToJsonArray as t };

  //# sourceMappingURL=package-json.mjs.map
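
Comment stripping only here as well. Per the source embedded in the map below, these helpers wrap `jsonc-parser`'s `modify`/`applyEdits` so edits preserve the file's existing formatting rather than re-serializing it, and `appendToJsonArray` inserts into string-only arrays in sorted order. A sketch of the semantics (the input JSON is made up):

  // Sketch of the edit helpers' behavior; the input is invented.
  const raw = '{\n  "name": "demo",\n  "files": ["a.md", "c.md"]\n}'
  editJsonProperty(raw, ['version'], '1.0.0')
  // sets "version": "1.0.0" without reflowing the rest of the file
  appendToJsonArray(raw, ['files'], 'b.md')
  // string-only array, so "b.md" is inserted between "a.md" and "c.md"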
package/dist/_chunks/package-json.mjs.map

@@ -1 +1 @@
- {"version":3,"file":"package-json.mjs","names":[],"sources":["../../src/core/package-json.ts"],"sourcesContent":["import { existsSync, readFileSync, writeFileSync } from 'node:fs'\nimport { applyEdits, modify, parseTree } from 'jsonc-parser'\n\nexport interface EditOptions {\n /** Formatting options for inserted content */\n tabSize?: number\n insertSpaces?: boolean\n}\n\nconst defaultEditOptions: EditOptions = { tabSize: 2, insertSpaces: true }\n\n// ── Cached reader ──────────────────────────────────────────────\n\nconst cache = new Map<string, { raw: string, parsed: Record<string, unknown> }>()\n\n/**\n * Read and parse a package.json, returning cached result on repeat calls.\n * Throws if the file does not exist.\n */\nexport function readPackageJson(pkgPath: string): { raw: string, parsed: Record<string, unknown> } {\n const hit = cache.get(pkgPath)\n if (hit)\n return hit\n const raw = readFileSync(pkgPath, 'utf-8')\n const parsed = JSON.parse(raw) as Record<string, unknown>\n const entry = { raw, parsed }\n cache.set(pkgPath, entry)\n return entry\n}\n\n/**\n * Same as readPackageJson but returns null when the file is missing or unparseable.\n */\nexport function readPackageJsonSafe(pkgPath: string): { raw: string, parsed: Record<string, unknown> } | null {\n if (cache.has(pkgPath))\n return cache.get(pkgPath)!\n if (!existsSync(pkgPath))\n return null\n try {\n return readPackageJson(pkgPath)\n }\n catch {\n return null\n }\n}\n\n/**\n * Drop any cached entry so the next read hits disk.\n */\nexport function invalidatePackageJson(pkgPath: string): void {\n cache.delete(pkgPath)\n}\n\n/**\n * Clear all cached entries. Useful in tests.\n */\nexport function clearPackageJsonCache(): void {\n cache.clear()\n}\n\n// ── JSON editing helpers ───────────────────────────────────────\n\n/**\n * Set a value at a JSON path, preserving all surrounding formatting.\n * Returns the modified file content as a string.\n */\nexport function editJsonProperty(raw: string, path: (string | number)[], value: unknown, options?: EditOptions): string {\n const opts = { ...defaultEditOptions, ...options }\n const edits = modify(raw, path, value, {\n formattingOptions: { tabSize: opts.tabSize!, insertSpaces: opts.insertSpaces! 
},\n })\n return applyEdits(raw, edits)\n}\n\n/**\n * Remove a value at a JSON path, preserving all surrounding formatting.\n */\nexport function removeJsonProperty(raw: string, path: (string | number)[]): string {\n const edits = modify(raw, path, undefined, {})\n return applyEdits(raw, edits)\n}\n\n/**\n * Read a package.json, apply an edit function, write it back, and invalidate the cache.\n * The edit function receives the raw text and parsed object,\n * and returns the new raw text (or null to skip writing).\n */\nexport function patchPackageJson(\n pkgPath: string,\n editFn: (raw: string, pkg: Record<string, unknown>) => string | null,\n): boolean {\n const { raw, parsed } = readPackageJson(pkgPath)\n const result = editFn(raw, parsed)\n if (result === null)\n return false\n writeFileSync(pkgPath, result)\n invalidatePackageJson(pkgPath)\n return true\n}\n\n/**\n * Append a value to a JSON array at the given path, preserving formatting.\n * Inserts in sorted order if the array contains strings.\n */\nexport function appendToJsonArray(raw: string, path: string[], value: string, options?: EditOptions): string {\n const opts = { ...defaultEditOptions, ...options }\n const tree = parseTree(raw)\n if (!tree)\n return editJsonProperty(raw, path, [value], opts)\n\n // Walk to the target array node\n let node = tree\n for (const key of path) {\n const child = node.children?.find(c =>\n c.type === 'property' && c.children?.[0]?.value === key,\n )\n if (!child?.children?.[1])\n return editJsonProperty(raw, path, [value], opts)\n node = child.children[1]\n }\n\n if (node.type !== 'array' || !node.children)\n return editJsonProperty(raw, path, [value], opts)\n\n // Find sorted insertion index (only for string-only arrays)\n const allStrings = node.children.every(c => typeof c.value === 'string')\n let idx = node.children.length\n if (allStrings) {\n const items = node.children.map(c => c.value as string)\n for (let i = 0; i < items.length; i++) {\n if (value.localeCompare(items[i]!) < 0) {\n idx = i\n break\n }\n }\n }\n\n const edits = modify(raw, [...path, idx], value, {\n formattingOptions: { tabSize: opts.tabSize!, insertSpaces: opts.insertSpaces! 
},\n isArrayInsertion: true,\n })\n return applyEdits(raw, edits)\n}\n"],"mappings":";;;AASA,MAAM,qBAAkC;CAAE,SAAS;CAAG,cAAc;CAAM;AAI1E,MAAM,wBAAQ,IAAI,KAA+D;;;;;AAMjF,SAAgB,gBAAgB,SAAmE;CACjG,MAAM,MAAM,MAAM,IAAI,QAAQ;AAC9B,KAAI,IACF,QAAO;CACT,MAAM,MAAM,aAAa,SAAS,QAAQ;CAE1C,MAAM,QAAQ;EAAE;EAAK,QADN,KAAK,MAAM,IAAA;EACG;AAC7B,OAAM,IAAI,SAAS,MAAM;AACzB,QAAO;;;;;AAMT,SAAgB,oBAAoB,SAA0E;AAC5G,KAAI,MAAM,IAAI,QAAQ,CACpB,QAAO,MAAM,IAAI,QAAQ;AAC3B,KAAI,CAAC,WAAW,QAAQ,CACtB,QAAO;AACT,KAAI;AACF,SAAO,gBAAgB,QAAQ;SAE3B;AACJ,SAAO;;;;;;AAOX,SAAgB,sBAAsB,SAAuB;AAC3D,OAAM,OAAO,QAAQ;;;;;;AAgBvB,SAAgB,iBAAiB,KAAa,MAA2B,OAAgB,SAA+B;CACtH,MAAM,OAAO;EAAE,GAAG;EAAoB,GAAG;EAAS;AAIlD,QAAO,WAAW,KAHJ,OAAO,KAAK,MAAM,OAAO,EACrC,mBAAmB;EAAE,SAAS,KAAK;EAAU,cAAc,KAAK;EAAe,EAChF,CAAC,CAC2B;;;;;;;AAgB/B,SAAgB,iBACd,SACA,QACS;CACT,MAAM,EAAE,KAAK,WAAW,gBAAgB,QAAQ;CAChD,MAAM,SAAS,OAAO,KAAK,OAAO;AAClC,KAAI,WAAW,KACb,QAAO;AACT,eAAc,SAAS,OAAO;AAC9B,uBAAsB,QAAQ;AAC9B,QAAO;;;;;;AAOT,SAAgB,kBAAkB,KAAa,MAAgB,OAAe,SAA+B;CAC3G,MAAM,OAAO;EAAE,GAAG;EAAoB,GAAG;EAAS;CAClD,MAAM,OAAO,UAAU,IAAI;AAC3B,KAAI,CAAC,KACH,QAAO,iBAAiB,KAAK,MAAM,CAAC,MAAM,EAAE,KAAK;CAGnD,IAAI,OAAO;AACX,MAAK,MAAM,OAAO,MAAM;EACtB,MAAM,QAAQ,KAAK,UAAU,MAAK,MAChC,EAAE,SAAS,cAAc,EAAE,WAAW,IAAI,UAAU,IACrD;AACD,MAAI,CAAC,OAAO,WAAW,GACrB,QAAO,iBAAiB,KAAK,MAAM,CAAC,MAAM,EAAE,KAAK;AACnD,SAAO,MAAM,SAAS;;AAGxB,KAAI,KAAK,SAAS,WAAW,CAAC,KAAK,SACjC,QAAO,iBAAiB,KAAK,MAAM,CAAC,MAAM,EAAE,KAAK;CAGnD,MAAM,aAAa,KAAK,SAAS,OAAM,MAAK,OAAO,EAAE,UAAU,SAAS;CACxE,IAAI,MAAM,KAAK,SAAS;AACxB,KAAI,YAAY;EACd,MAAM,QAAQ,KAAK,SAAS,KAAI,MAAK,EAAE,MAAgB;AACvD,OAAK,IAAI,IAAI,GAAG,IAAI,MAAM,QAAQ,IAChC,KAAI,MAAM,cAAc,MAAM,GAAI,GAAG,GAAG;AACtC,SAAM;AACN;;;AASN,QAAO,WAAW,KAJJ,OAAO,KAAK,CAAC,GAAG,MAAM,IAAI,EAAE,OAAO;EAC/C,mBAAmB;GAAE,SAAS,KAAK;GAAU,cAAc,KAAK;GAAe;EAC/E,kBAAkB;EACnB,CAAC,CAC2B"}
+ {"version":3,"file":"package-json.mjs","names":[],"sources":["../../src/core/package-json.ts"],"sourcesContent":["import { existsSync, readFileSync, writeFileSync } from 'node:fs'\nimport { applyEdits, modify, parseTree } from 'jsonc-parser'\n\nexport interface EditOptions {\n /** Formatting options for inserted content */\n tabSize?: number\n insertSpaces?: boolean\n}\n\nconst defaultEditOptions: EditOptions = { tabSize: 2, insertSpaces: true }\n\n// ── Cached reader ──────────────────────────────────────────────\n\nconst cache = new Map<string, { raw: string, parsed: Record<string, unknown> }>()\n\n/**\n * Read and parse a package.json, returning cached result on repeat calls.\n * Throws if the file does not exist.\n */\nexport function readPackageJson(pkgPath: string): { raw: string, parsed: Record<string, unknown> } {\n const hit = cache.get(pkgPath)\n if (hit)\n return hit\n const raw = readFileSync(pkgPath, 'utf-8')\n const parsed = JSON.parse(raw) as Record<string, unknown>\n const entry = { raw, parsed }\n cache.set(pkgPath, entry)\n return entry\n}\n\n/**\n * Same as readPackageJson but returns null when the file is missing or unparseable.\n */\nexport function readPackageJsonSafe(pkgPath: string): { raw: string, parsed: Record<string, unknown> } | null {\n if (cache.has(pkgPath))\n return cache.get(pkgPath)!\n if (!existsSync(pkgPath))\n return null\n try {\n return readPackageJson(pkgPath)\n }\n catch {\n return null\n }\n}\n\n/**\n * Drop any cached entry so the next read hits disk.\n */\nexport function invalidatePackageJson(pkgPath: string): void {\n cache.delete(pkgPath)\n}\n\n/**\n * Clear all cached entries. Useful in tests.\n */\nexport function clearPackageJsonCache(): void {\n cache.clear()\n}\n\n// ── JSON editing helpers ───────────────────────────────────────\n\n/**\n * Set a value at a JSON path, preserving all surrounding formatting.\n * Returns the modified file content as a string.\n */\nexport function editJsonProperty(raw: string, path: (string | number)[], value: unknown, options?: EditOptions): string {\n const opts = { ...defaultEditOptions, ...options }\n const edits = modify(raw, path, value, {\n formattingOptions: { tabSize: opts.tabSize!, insertSpaces: opts.insertSpaces! },\n })\n return applyEdits(raw, edits)\n}\n\n/**\n * Remove a value at a JSON path, preserving all surrounding formatting.\n */\nexport function removeJsonProperty(raw: string, path: (string | number)[]): string {\n const edits = modify(raw, path, undefined, {})\n return applyEdits(raw, edits)\n}\n\n/**\n * Read a package.json, apply an edit function, write it back, and invalidate the cache.\n * The edit function receives the raw text and parsed object,\n * and returns the new raw text (or null to skip writing).\n */\nexport function patchPackageJson(\n pkgPath: string,\n editFn: (raw: string, pkg: Record<string, unknown>) => string | null,\n): boolean {\n const { raw, parsed } = readPackageJson(pkgPath)\n const result = editFn(raw, parsed)\n if (result === null)\n return false\n writeFileSync(pkgPath, result)\n invalidatePackageJson(pkgPath)\n return true\n}\n\n/**\n * Append a value to a JSON array at the given path, preserving formatting.\n * Inserts in sorted order if the array contains strings.\n */\nexport function appendToJsonArray(raw: string, path: string[], value: string, options?: EditOptions): string {\n const opts = { ...defaultEditOptions, ...options }\n const tree = parseTree(raw)\n if (!tree)\n return editJsonProperty(raw, path, [value], opts)\n\n // Walk to the target array node\n let node = tree\n for (const key of path) {\n const child = node.children?.find(c =>\n c.type === 'property' && c.children?.[0]?.value === key,\n )\n if (!child?.children?.[1])\n return editJsonProperty(raw, path, [value], opts)\n node = child.children[1]\n }\n\n if (node.type !== 'array' || !node.children)\n return editJsonProperty(raw, path, [value], opts)\n\n // Find sorted insertion index (only for string-only arrays)\n const allStrings = node.children.every(c => typeof c.value === 'string')\n let idx = node.children.length\n if (allStrings) {\n const items = node.children.map(c => c.value as string)\n for (let i = 0; i < items.length; i++) {\n if (value.localeCompare(items[i]!) < 0) {\n idx = i\n break\n }\n }\n }\n\n const edits = modify(raw, [...path, idx], value, {\n formattingOptions: { tabSize: opts.tabSize!, insertSpaces: opts.insertSpaces! },\n isArrayInsertion: true,\n })\n return applyEdits(raw, edits)\n}\n"],"mappings":";;AASA,MAAM,qBAAkC;CAAE,SAAS;CAAG,cAAc;CAAM;AAI1E,MAAM,wBAAQ,IAAI,KAA+D;;;;CAMjF,MAAA,MAAgB,aAAgB,SAAmE,QAAA;CACjG,MAAM,QAAM;EACZ;EAEA,QAAM,KAAM,MAAA,IAAA;EAEZ;OAAgB,IAAA,SAAA,MAAA;QAAK;;AAErB,SAAO,oBAAA,SAAA;;;;;SAMO;AACd,SAAI;;;SAOE,sBAAA,SAAA;AACJ,OAAA,OAAO,QAAA;;;;;EAOX,GAAA;EACE;;;;;;SAiBM,iBAAO,SAAA,QAAA;OAAK,EAAA,KAAA,WAAA,gBAAA,QAAA;OAAuB,SAAA,OAAA,KAAA,OAAA;KAAS,WAAA,KAAA,QAAA;AAIlD,eAAO,SAHO,OAAO;uBACgB,QAAA;QAAU;;;;;;;CAkBjD,MAAA,OAAgB,UAAA,IACd;AAGA,KAAA,CAAA,KAAQ,QAAK,iBAAW,KAAgB,MAAA,CAAA,MAAQ,EAAA,KAAA;CAChD,IAAA,OAAM;AACN,MAAI,MAAA,OAAW,MACb;EACF,MAAA,QAAc,KAAA,UAAgB,MAAA,MAAA,EAAA,SAAA,cAAA,EAAA,WAAA,IAAA,UAAA,IAAA;AAC9B,MAAA,CAAA,OAAA,WAAsB,GAAA,QAAQ,iBAAA,KAAA,MAAA,CAAA,MAAA,EAAA,KAAA;AAC9B,SAAO,MAAA,SAAA;;;;;;EAOT,MAAA,QAAgB,KAAA,SAAkB,KAAa,MAAgB,EAAA,MAAe;AAC5E,OAAM,IAAA,IAAO,GAAA,IAAA,MAAA,QAAA,IAAA,KAAA,MAAA,cAAA,MAAA,GAAA,GAAA,GAAA;AAAE,SAAG;AAAoB;;;AAEtC,QAAK,WACI,KAAA,OAAA,KAAiB,CAAA,GAAA,MAAK,IAAO,EAAA,OAAQ;EAG9C,mBAAW;GACX,SAAW,KAAA;GACT,cAAc,KAAK;GAGnB;EAEA,kBAAa;;;SAQX,uBAAoB,GAAA,oBAAA,GAAA,oBAAA,GAAA,qBAAA"}
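
The sourcesContent embedded in both versions of the map shows the technique appendToJsonArray uses to keep string arrays sorted without reflowing the file: walk the jsonc-parser syntax tree to the target array, pick an insertion index with localeCompare, and apply a single isArrayInsertion edit. A standalone sketch of that idea using only the public jsonc-parser API (parseTree, modify, applyEdits); the helper name insertSorted is ours, not the package's, and string-only arrays are assumed:

import { applyEdits, modify, parseTree } from 'jsonc-parser'

const formattingOptions = { tabSize: 2, insertSpaces: true }

// Insert `value` at its sorted position in the string array at `path`;
// if the node is missing or not an array, write a one-element array instead.
function insertSorted(raw, path, value) {
  let node = parseTree(raw)
  for (const key of path) {
    const child = node?.children?.find(c =>
      c.type === 'property' && c.children?.[0]?.value === key)
    node = child?.children?.[1]
  }
  if (!node || node.type !== 'array' || !node.children)
    return applyEdits(raw, modify(raw, path, [value], { formattingOptions }))
  const items = node.children.map(c => c.value)
  let idx = items.findIndex(item => value.localeCompare(item) < 0)
  if (idx === -1)
    idx = items.length
  // A numeric final path segment plus isArrayInsertion splices rather than overwrites.
  const edits = modify(raw, [...path, idx], value, { formattingOptions, isArrayInsertion: true })
  return applyEdits(raw, edits)
}

// "beta" lands between "alpha" and "gamma"; surrounding formatting is untouched.
console.log(insertSorted('{ "keywords": ["alpha", "gamma"] }', ['keywords'], 'beta'))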