@anytio/pspm 0.12.0 → 0.14.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +26 -2
- package/CLI_GUIDE.md +248 -8
- package/README.md +4 -2
- package/dist/add-CcgUlOLa.js +755 -0
- package/dist/add-CcgUlOLa.js.map +1 -0
- package/dist/add-Cnn-OR9g.js +2 -0
- package/dist/api-client-CBTk37gh.js +2 -0
- package/dist/api-client-DBXUpGoX.js +452 -0
- package/dist/api-client-DBXUpGoX.js.map +1 -0
- package/dist/config-BQy_Rjip.js +470 -0
- package/dist/config-BQy_Rjip.js.map +1 -0
- package/dist/config-BZJ6_GsC.js +2 -0
- package/dist/index.js +2782 -6826
- package/dist/index.js.map +1 -1
- package/dist/install-gcvbBeWi.js +2 -0
- package/dist/install-lNvqIk5c.js +479 -0
- package/dist/install-lNvqIk5c.js.map +1 -0
- package/dist/symlinks-BTw8X0GG.js +1834 -0
- package/dist/symlinks-BTw8X0GG.js.map +1 -0
- package/package.json +14 -12
|
@@ -0,0 +1,1834 @@
|
|
|
1
|
+
import { a as getGithubSkillVersion, g as listSkillVersions, h as listOrgSkillVersions, m as getSkillVersion, n as configure, o as listGithubSkillVersions, p as getOrgSkillVersion } from "./api-client-DBXUpGoX.js";
|
|
2
|
+
import { a as getLegacyLockfilePath, s as getLockfilePath, u as isGlobalMode, v as getRegistryUrl } from "./config-BQy_Rjip.js";
|
|
3
|
+
import { dirname, join, relative } from "node:path";
|
|
4
|
+
import { cp, lstat, mkdir, readFile, readdir, readlink, rm, stat, symlink, writeFile } from "node:fs/promises";
|
|
5
|
+
import { homedir } from "node:os";
|
|
6
|
+
import { createCipheriv, createDecipheriv, createHash, randomBytes, scryptSync } from "node:crypto";
|
|
7
|
+
import ignore from "ignore";
|
|
8
|
+
import * as semver$1 from "semver";
|
|
9
|
+
import { checkbox } from "@inquirer/prompts";
|
|
10
|
+
//#region src/lib/encryption.ts
// Parameters for passphrase-based AES-256-GCM encryption used by
// encryptBuffer/decryptBuffer below.
const ALGORITHM = "aes-256-gcm";
const KEY_LENGTH = 32; // 256-bit AES key, in bytes
const IV_LENGTH = 16; // GCM initialization vector, in bytes
const SALT_LENGTH = 32; // scrypt salt, in bytes
const SCRYPT_COST = 16384; // scrypt N (CPU/memory cost parameter)
const SCRYPT_BLOCK_SIZE = 8; // scrypt r (block size parameter)
const SCRYPT_PARALLELISM = 1; // scrypt p (parallelization parameter)
const AUTH_TAG_LENGTH = 16; // GCM authentication tag, in bytes
|
|
19
|
+
/**
 * Derive an AES-256 key from a passphrase using scrypt.
 *
 * @param passphrase - The secret passphrase
 * @param salt - Random salt buffer (SALT_LENGTH bytes by convention)
 * @returns A KEY_LENGTH-byte key buffer
 */
function deriveKey(passphrase, salt) {
  const scryptParams = {
    N: SCRYPT_COST,
    r: SCRYPT_BLOCK_SIZE,
    p: SCRYPT_PARALLELISM
  };
  return scryptSync(passphrase, salt, KEY_LENGTH, scryptParams);
}
|
|
29
|
+
/**
 * Encrypt a buffer using AES-256-GCM with a passphrase.
 *
 * A fresh random salt and IV are generated per call; the scrypt-derived
 * key never leaves this function.
 *
 * @param data - The plaintext buffer to encrypt
 * @param passphrase - The encryption passphrase
 * @param scope - The scope identifier (e.g., "@user/alice" or "@org/acme")
 * @returns The encrypted buffer and metadata needed for decryption
 */
function encryptBuffer(data, passphrase, scope) {
  const salt = randomBytes(SALT_LENGTH);
  const iv = randomBytes(IV_LENGTH);
  const key = deriveKey(passphrase, salt);
  const cipher = createCipheriv(ALGORITHM, key, iv, { authTagLength: AUTH_TAG_LENGTH });
  const ciphertext = Buffer.concat([cipher.update(data), cipher.final()]);
  // getAuthTag is only valid after final() has been called.
  const metadata = {
    algorithm: ALGORITHM,
    kdf: "scrypt",
    salt: salt.toString("hex"),
    iv: iv.toString("hex"),
    authTag: cipher.getAuthTag().toString("hex"),
    scope
  };
  return { encrypted: ciphertext, metadata };
}
|
|
55
|
+
/**
 * Decrypt a buffer using AES-256-GCM with a passphrase.
 *
 * @param encrypted - The encrypted buffer
 * @param passphrase - The encryption passphrase
 * @param metadata - The encryption metadata (hex-encoded salt, iv, authTag)
 * @returns The decrypted plaintext buffer
 * @throws Error if decryption fails (wrong passphrase or corrupted data)
 */
function decryptBuffer(encrypted, passphrase, metadata) {
  const salt = Buffer.from(metadata.salt, "hex");
  const iv = Buffer.from(metadata.iv, "hex");
  const key = deriveKey(passphrase, salt);
  const decipher = createDecipheriv(ALGORITHM, key, iv, { authTagLength: AUTH_TAG_LENGTH });
  // GCM verifies the tag during final(); a mismatch throws there.
  decipher.setAuthTag(Buffer.from(metadata.authTag, "hex"));
  const plaintext = Buffer.concat([decipher.update(encrypted), decipher.final()]);
  return plaintext;
}
|
|
72
|
+
//#endregion
|
|
73
|
+
//#region src/lib/ignore.ts
/**
 * Ignore file handling for PSPM publish/pack
 *
 * Similar to npm's .npmignore behavior:
 * - If .pspmignore exists, use it
 * - Otherwise, fallback to .gitignore
 * - Always ignore node_modules and .git regardless
 */
/**
 * Files/directories that are always ignored regardless of ignore file
 * contents. ".pspm-publish" is the staging directory used during
 * publish, so it must never be packed into itself.
 */
const ALWAYS_IGNORED = [
  "node_modules",
  ".git",
  ".pspm-publish"
];
|
|
90
|
+
/**
 * Load ignore patterns from .pspmignore or .gitignore.
 *
 * Priority:
 * 1. .pspmignore (if it exists and is readable)
 * 2. .gitignore (if it exists and is readable)
 * 3. Default patterns only (node_modules, .git, .pspm-publish)
 *
 * Read failures are treated as "file absent" and fall through to the
 * next candidate.
 *
 * @param cwd - Directory to look for ignore files (defaults to process.cwd())
 * @returns An ignore instance, the source file used (or null), and the
 *   raw patterns that were loaded
 */
async function loadIgnorePatterns(cwd = process.cwd()) {
  const ig = ignore();
  ig.add(ALWAYS_IGNORED);
  for (const source of [".pspmignore", ".gitignore"]) {
    try {
      const content = await readFile(join(cwd, source), "utf-8");
      const patterns = parseIgnorePatterns(content);
      ig.add(patterns);
      return { ig, source, patterns };
    } catch {
      // Missing or unreadable file: try the next candidate.
    }
  }
  return { ig, source: null, patterns: [] };
}
|
|
130
|
+
/**
 * Build rsync exclude flags from ignore patterns.
 *
 * Always-ignored entries are merged in and duplicates removed.
 * Note: this returns a single space-separated string of
 * `--exclude='pattern'` flags, not an array; patterns containing a
 * single quote would break the quoting.
 *
 * @param patterns - Patterns loaded from the ignore file
 * @returns Space-separated `--exclude='pattern'` flags
 */
function getExcludeArgsForRsync(patterns) {
  const unique = new Set([...ALWAYS_IGNORED, ...patterns]);
  const flags = [];
  for (const pattern of unique) {
    flags.push(`--exclude='${pattern}'`);
  }
  return flags.join(" ");
}
|
|
139
|
+
/**
 * Parse ignore file content into an array of patterns.
 *
 * Lines are trimmed; empty lines and `#` comments are dropped.
 *
 * @param content - The content of an ignore file
 * @returns Array of patterns
 */
function parseIgnorePatterns(content) {
  const patterns = [];
  for (const rawLine of content.split("\n")) {
    const line = rawLine.trim();
    if (line.length === 0) continue;
    if (line.startsWith("#")) continue;
    patterns.push(line);
  }
  return patterns;
}
|
|
149
|
+
//#endregion
|
|
150
|
+
//#region src/lib/integrity.ts
/**
 * Calculate an integrity hash for a buffer.
 *
 * Uses SHA-256 with base64 encoding, prefixed with "sha256-"
 * (subresource-integrity style).
 *
 * @param data - The buffer to hash
 * @returns Integrity string (e.g., "sha256-abc123...")
 */
function calculateIntegrity(data) {
  const digest = createHash("sha256").update(data).digest("base64");
  return `sha256-${digest}`;
}
|
|
161
|
+
//#endregion
|
|
162
|
+
//#region src/lib/lockfile.ts
/**
 * PSPM lockfile JSON-schema URL, embedded in generated lockfiles so
 * IDEs can offer validation/completion.
 */
const PSPM_LOCKFILE_SCHEMA_URL = "https://pspm.dev/schema/v1/pspm-lock.json";
|
|
167
|
+
//#endregion
|
|
168
|
+
//#region src/lib/manifest.ts
/**
 * Default file patterns to include when publishing a skill package.
 */
const DEFAULT_SKILL_FILES = [
  "SKILL.md",
  "runtime",
  "scripts",
  "data"
];
/**
 * JSON-schema URL for pspm.json (versioned), for IDE validation.
 */
const PSPM_SCHEMA_URL = "https://pspm.dev/schema/v1/pspm.json";
|
|
182
|
+
/**
 * Validate that a manifest has the required fields.
 *
 * Checks, in order: presence of `name`, presence of `version`, bare-name
 * format (last path segment of `name`), and semver format of `version`.
 *
 * @param manifest - Parsed pspm.json object
 * @returns `{ valid: true }` or `{ valid: false, error }`
 */
function validateManifest(manifest) {
  if (!manifest.name) return {
    valid: false,
    error: "Manifest must have a 'name' field"
  };
  if (!manifest.version) return {
    valid: false,
    error: "Manifest must have a 'version' field"
  };
  // Only the bare skill name (last segment) is format-checked; scope
  // segments such as "@user/alice" are validated elsewhere.
  const parts = manifest.name.split("/");
  const bareName = parts[parts.length - 1];
  if (!/^[a-z][a-z0-9_-]*$/.test(bareName)) return {
    valid: false,
    error: "Name must start with a lowercase letter and contain only lowercase letters, numbers, hyphens, and underscores"
  };
  // Fix: the previous pattern was unanchored at the end, so trailing
  // garbage (e.g. "1.0.0junk") was accepted. Anchor it while still
  // allowing -prerelease and +build suffixes.
  if (!/^\d+\.\d+\.\d+(?:-[0-9A-Za-z.-]+)?(?:\+[0-9A-Za-z.-]+)?$/.test(manifest.version)) return {
    valid: false,
    error: "Version must be a valid semantic version (e.g., 1.0.0)"
  };
  return { valid: true };
}
|
|
206
|
+
//#endregion
|
|
207
|
+
//#region ../../packages/shared/skill-types/src/specifier.ts
/**
 * Unified registry specifier regex pattern.
 *
 * Accepted shapes:
 * - @user/{owner}/{name}[@version]
 * - @org/{owner}/{name}[@version]
 * - @github/{owner}/{repo}/{skillname}[@version]
 *
 * Capture groups: 1 namespace, 2 owner, 3 name (skill name for
 * user/org, repo name for github), 4 optional subname (github only),
 * 5 optional version after "@".
 */
const REGISTRY_SPECIFIER_PATTERN$1 = /^@(user|org|github)\/([a-zA-Z0-9_-]+)\/([a-zA-Z0-9._-]+)(?:\/([a-z][a-z0-9-]*))?(?:@(.+))?$/;
/**
 * Parse a registry specifier string (any namespace).
 *
 * The github namespace requires a subname; user/org forbid one.
 *
 * @param specifier - The specifier string
 * @returns Parsed specifier or null if invalid
 *
 * @example
 * ```typescript
 * parseRegistrySpecifier("@user/bsheng/my-skill@^1.0.0")
 * // => { namespace: "user", owner: "bsheng", name: "my-skill", versionRange: "^1.0.0" }
 * ```
 */
function parseRegistrySpecifier$1(specifier) {
  const match = specifier.match(REGISTRY_SPECIFIER_PATTERN$1);
  if (!match) return null;
  const [, namespace, owner, name, subname, versionRange] = match;
  if (!owner || !name) return null;
  const isGithub = namespace === "github";
  // github needs a subname, everything else must not have one.
  if (isGithub !== Boolean(subname)) return null;
  return {
    namespace,
    owner,
    name,
    subname: subname || void 0,
    versionRange: versionRange || void 0
  };
}
|
|
260
|
+
//#endregion
|
|
261
|
+
//#region src/lib/resolver-api.ts
/**
 * Fetch the list of versions for a package across all namespaces.
 *
 * Dispatches to the github/org/user list endpoint based on the parsed
 * specifier; returns the response data on HTTP 200, otherwise undefined.
 */
async function fetchVersionList(parsed) {
  let resp;
  if (parsed.namespace === "github" && parsed.subname) {
    resp = await listGithubSkillVersions(parsed.owner, parsed.name, parsed.subname);
  } else if (parsed.namespace === "org") {
    resp = await listOrgSkillVersions(parsed.owner, parsed.name);
  } else {
    resp = await listSkillVersions(parsed.owner, parsed.name);
  }
  return resp.status === 200 ? resp.data : void 0;
}
|
|
277
|
+
/**
 * Fetch package details for a specific version across all namespaces.
 *
 * Dispatches to the github/org/user detail endpoint based on the parsed
 * specifier; returns the response data on HTTP 200 with a body,
 * otherwise null.
 */
async function fetchVersionDetails(parsed, version) {
  let resp;
  if (parsed.namespace === "github" && parsed.subname) {
    resp = await getGithubSkillVersion(parsed.owner, parsed.name, parsed.subname, version);
  } else if (parsed.namespace === "org") {
    resp = await getOrgSkillVersion(parsed.owner, parsed.name, version);
  } else {
    resp = await getSkillVersion(parsed.owner, parsed.name, version);
  }
  return resp.status === 200 && resp.data ? resp.data : null;
}
|
|
292
|
+
//#endregion
|
|
293
|
+
//#region src/lib/resolver-format.ts
/**
 * Topologically sort packages using Kahn's algorithm.
 * Packages with no dependencies are installed first.
 *
 * Fix: the output-membership check previously used `sorted.includes(...)`
 * — an O(n) scan per processed edge, making the sort accidentally
 * O(n^2). A Set of queued nodes now gives O(1) membership.
 *
 * @param graph - The dependency graph (graph.nodes: Map name -> node,
 *   node.dependencies: object of depName -> range)
 * @returns Sorted list of package names, dependencies before dependents
 */
function topologicalSort(graph) {
  const inDegree = new Map();
  const dependents = new Map();
  for (const name of graph.nodes.keys()) {
    inDegree.set(name, 0);
    dependents.set(name, []);
  }
  // Edges run dep -> dependent; only count deps that exist in the graph.
  for (const [name, node] of graph.nodes.entries()) {
    for (const depName of Object.keys(node.dependencies)) {
      if (!graph.nodes.has(depName)) continue;
      inDegree.set(name, (inDegree.get(name) ?? 0) + 1);
      if (!dependents.has(depName)) dependents.set(depName, []);
      dependents.get(depName)?.push(name);
    }
  }
  const queue = [];
  const queued = new Set();
  for (const [name, degree] of inDegree.entries()) {
    if (degree === 0) {
      queue.push(name);
      queued.add(name);
    }
  }
  const sorted = [];
  while (queue.length > 0) {
    const current = queue.shift();
    if (!current) continue;
    sorted.push(current);
    for (const dependent of dependents.get(current) ?? []) {
      const newDegree = (inDegree.get(dependent) ?? 1) - 1;
      inDegree.set(dependent, newDegree);
      if (newDegree === 0 && !queued.has(dependent)) {
        queue.push(dependent);
        queued.add(dependent);
      }
    }
  }
  // Nodes stuck in a cycle never reach in-degree 0 and are omitted.
  return sorted;
}
|
|
329
|
+
/**
 * Compute installation order from lockfile packages via post-order DFS.
 * Dependencies are installed before dependents; the visited set makes
 * cycles terminate (cycle members appear in encounter order).
 *
 * @param packages - Lockfile packages keyed by name, each optionally
 *   carrying a `dependencies` object
 * @returns Sorted list of package names
 */
function computeInstallOrder(packages) {
  const seen = new Set();
  const order = [];
  const visit = (name) => {
    if (seen.has(name)) return;
    seen.add(name);
    const deps = packages[name]?.dependencies;
    if (deps) {
      for (const depName of Object.keys(deps)) visit(depName);
    }
    order.push(name);
  };
  for (const name of Object.keys(packages)) visit(name);
  return order;
}
|
|
349
|
+
/**
 * Format resolution errors for display.
 *
 * Path-style errors render the dependency chain; all other error types
 * fall back to the error's own message.
 *
 * @param errors - Resolution errors
 * @returns Formatted error messages, one per input error
 */
function formatResolutionErrors(errors) {
  return errors.map((error) => {
    if (error.type === "circular_dependency") {
      return `Circular dependency: ${error.path?.join(" -> ") ?? error.package}`;
    }
    if (error.type === "max_depth_exceeded") {
      return `Max depth exceeded at: ${error.path?.join(" -> ") ?? error.package}`;
    }
    if (error.type === "package_not_found") {
      return `Package not found: ${error.package}`;
    }
    return error.message;
  });
}
|
|
367
|
+
/**
 * Format version conflicts for display.
 *
 * @param conflicts - Version conflicts, each with a package name and
 *   the ranges demanded by its dependents
 * @returns Formatted conflict messages, one per conflict
 */
function formatVersionConflicts(conflicts) {
  return conflicts.map(({ package: pkg, ranges }) => {
    const requirements = ranges
      .map(({ dependent, range }) => `${dependent} needs ${range}`)
      .join(", ");
    return `No version of ${pkg} satisfies: ${requirements}`;
  });
}
|
|
379
|
+
/**
 * Print resolution errors and version conflicts to stderr.
 *
 * Each non-empty section gets a header followed by one indented line
 * per message; empty sections print nothing.
 *
 * @param errors - Resolution errors
 * @param conflicts - Version conflicts (defaults to none)
 */
function printResolutionErrors(errors, conflicts = []) {
  const errorMessages = formatResolutionErrors(errors);
  if (errorMessages.length > 0) {
    console.error("\nResolution errors:");
    for (const msg of errorMessages) console.error(` - ${msg}`);
  }
  const conflictMessages = formatVersionConflicts(conflicts);
  if (conflictMessages.length > 0) {
    console.error("\nVersion conflicts:");
    for (const msg of conflictMessages) console.error(` - ${msg}`);
  }
}
|
|
395
|
+
//#endregion
|
|
396
|
+
//#region src/lib/version.ts
/**
 * Resolve the best matching version from a list of available versions.
 *
 * Invalid semver strings are dropped; a missing range, "latest", or "*"
 * selects the newest available version.
 *
 * @param range - The version range to match (e.g., "^1.0.0", "~2.1.0", "*")
 * @param availableVersions - List of available version strings
 * @returns The best matching version or null if none found
 */
function resolveVersion(range, availableVersions) {
  const valid = availableVersions.filter((v) => semver$1.valid(v));
  valid.sort((a, b) => semver$1.rcompare(a, b));
  const wantsLatest = !range || range === "latest" || range === "*";
  if (wantsLatest) return valid[0] ?? null;
  return semver$1.maxSatisfying(valid, range);
}
|
|
409
|
+
/**
 * Check if version a is greater than version b.
 *
 * Thin wrapper over semver.gt; throws if either argument is not a
 * valid semver string (semver.gt's behavior).
 */
function isNewerVersion(a, b) {
  return semver$1.gt(a, b);
}
|
|
415
|
+
/**
 * Find the highest version that satisfies ALL given ranges.
 *
 * Used for pnpm-style dependency resolution where multiple dependents
 * may require the same package with different version constraints.
 * Empty/"latest"/"*" ranges are normalized to match anything.
 *
 * @param ranges - Array of semver ranges to satisfy (e.g., ["^1.0.0", ">=1.2.0"])
 * @param availableVersions - List of available version strings
 * @returns The highest version satisfying all ranges, or null if none found
 */
function findHighestSatisfying(ranges, availableVersions) {
  const candidates = availableVersions
    .filter((v) => semver$1.valid(v))
    .sort((a, b) => semver$1.rcompare(a, b));
  if (candidates.length === 0) return null;
  const normalized = ranges.map((r) => {
    if (!r || r === "latest" || r === "*") return "*";
    return r;
  });
  const matchesAll = (version) => normalized.every((range) => semver$1.satisfies(version, range));
  // Candidates are newest-first, so the first match is the highest.
  return candidates.find(matchesAll) ?? null;
}
|
|
431
|
+
//#endregion
|
|
432
|
+
//#region src/lib/resolver.ts
/**
 * Recursive Dependency Resolver for PSPM
 *
 * Implements pnpm-style dependency resolution:
 * - Highest satisfying version strategy
 * - 5-depth limit to prevent deep trees
 * - Circular dependency detection
 * - Topological sort for installation order
 */
/**
 * Build a resolution-graph node from fetched version details.
 *
 * The hex checksum is re-encoded as a "sha256-<base64>" integrity
 * string; depth 0 marks a direct (root) dependency.
 */
function buildNode(name, version, versionRange, details, depth, dependent) {
  const checksumB64 = Buffer.from(details.checksum, "hex").toString("base64");
  const node = {
    name,
    version,
    versionRange,
    downloadUrl: details.downloadUrl,
    integrity: `sha256-${checksumB64}`,
    depth,
    dependencies: details.manifest?.dependencies ?? {},
    dependents: [dependent],
    isDirect: depth === 0,
    deprecated: details.deprecationMessage ?? void 0
  };
  return node;
}
|
|
457
|
+
/**
 * Resolve dependencies recursively using BFS.
 *
 * Algorithm:
 * 1. Queue root dependencies at depth=0
 * 2. For each package, collect all version ranges from dependents
 * 3. Find highest version satisfying ALL ranges
 * 4. Fetch package details including its dependencies
 * 5. Queue transitive dependencies at depth+1
 * 6. Topologically sort for installation order
 *
 * @param rootDeps - Direct dependencies: name -> version range
 * @param config - Resolver configuration (registryUrl, apiKey, maxDepth)
 * @returns Resolution result with graph and install order
 */
async function resolveRecursive(rootDeps, config) {
  const graph = {
    nodes: new Map(),
    roots: Object.keys(rootDeps),
    errors: [],
    conflicts: []
  };
  // Point the API client at the configured registry before any fetches.
  configure({
    registryUrl: config.registryUrl,
    apiKey: config.apiKey
  });
  const rangesByPackage = new Map();
  const queue = Object.entries(rootDeps).map(([name, range]) => ({
    name,
    versionRange: range,
    depth: 0,
    dependent: "root",
    path: []
  }));
  await collectRanges(queue, rangesByPackage, new Set(), graph, config);
  await resolveFinalVersions(rangesByPackage, graph);
  const installOrder = topologicalSort(graph);
  const success = graph.errors.length === 0 && graph.conflicts.length === 0;
  return { success, graph, installOrder };
}
|
|
501
|
+
/**
 * BFS pass 1 of resolution: drain the work queue, recording every
 * version range requested for each package and building a provisional
 * node per package (first-seen range wins; resolveFinalVersions later
 * reconciles all collected ranges).
 *
 * @param queue - Work items: { name, versionRange, depth, dependent, path }
 * @param rangesByPackage - Out: name -> [{ range, dependent, depth }]
 * @param processing - Names already fetched; prevents duplicate registry calls
 * @param graph - Out: nodes are added; errors are appended, never thrown
 * @param config - Resolver configuration (maxDepth is read here)
 */
async function collectRanges(queue, rangesByPackage, processing, graph, config) {
  while (queue.length > 0) {
    const item = queue.shift();
    if (!item) continue;
    const { name, versionRange, depth, dependent, path } = item;
    // Depth guard: record an error instead of descending further.
    if (depth > config.maxDepth) {
      graph.errors.push({
        type: "max_depth_exceeded",
        package: name,
        message: `Maximum dependency depth (${config.maxDepth}) exceeded at: ${[...path, name].join(" -> ")}`,
        path: [...path, name]
      });
      continue;
    }
    // Cycle guard: the path from root back to this name repeats.
    if (path.includes(name)) {
      graph.errors.push({
        type: "circular_dependency",
        package: name,
        message: `Circular dependency detected: ${[...path, name].join(" -> ")}`,
        path: [...path, name]
      });
      continue;
    }
    // Always record the requested range, even for already-fetched packages.
    if (!rangesByPackage.has(name)) rangesByPackage.set(name, []);
    rangesByPackage.get(name)?.push({
      range: versionRange,
      dependent,
      depth
    });
    // Fetch each package from the registry at most once.
    if (processing.has(name)) continue;
    processing.add(name);
    const parsed = parseRegistrySpecifier$1(name);
    if (!parsed) {
      graph.errors.push({
        type: "package_not_found",
        package: name,
        message: `Invalid package name format: ${name}`
      });
      continue;
    }
    try {
      const versionsData = await fetchVersionList(parsed);
      if (!versionsData) {
        graph.errors.push({
          type: "package_not_found",
          package: name,
          message: `Package ${name} not found in registry`
        });
        continue;
      }
      if (versionsData.length === 0) {
        graph.errors.push({
          type: "package_not_found",
          package: name,
          message: `Package ${name} has no versions`
        });
        continue;
      }
      const availableVersions = versionsData.map((v) => v.version);
      // Provisional pick: highest version satisfying this item's range only.
      const resolvedVersion = findHighestSatisfying([versionRange], availableVersions);
      if (!resolvedVersion) {
        graph.errors.push({
          type: "no_satisfying_version",
          package: name,
          message: `No version of ${name} satisfies: ${versionRange}`
        });
        continue;
      }
      const versionData = await fetchVersionDetails(parsed, resolvedVersion);
      if (!versionData) {
        graph.errors.push({
          type: "fetch_error",
          package: name,
          message: `Failed to fetch ${name}@${resolvedVersion}`
        });
        continue;
      }
      const node = buildNode(name, resolvedVersion, versionRange, versionData, depth, dependent);
      graph.nodes.set(name, node);
      // Enqueue transitive dependencies one level deeper, extending the path.
      for (const [depName, depRange] of Object.entries(node.dependencies)) queue.push({
        name: depName,
        versionRange: depRange,
        depth: depth + 1,
        dependent: name,
        path: [...path, name]
      });
    } catch (error) {
      // Network/API failures become graph errors rather than aborting the walk.
      const message = error instanceof Error ? error.message : "Unknown error";
      graph.errors.push({
        type: "fetch_error",
        package: name,
        message: `Error fetching ${name}: ${message}`
      });
    }
  }
}
|
|
597
|
+
/**
 * BFS pass 2 of resolution: reconcile every package's collected ranges.
 *
 * For each package, finds the highest version satisfying ALL recorded
 * ranges. If none exists, records a conflict plus an error; if the
 * reconciled version differs from the provisional one chosen in pass 1,
 * the node is updated in place with the new version's details.
 *
 * @param rangesByPackage - name -> [{ range, dependent, depth }] from pass 1
 * @param graph - Mutated: node fields updated, conflicts/errors appended
 */
async function resolveFinalVersions(rangesByPackage, graph) {
  for (const [name, ranges] of rangesByPackage.entries()) {
    const node = graph.nodes.get(name);
    // Packages that failed in pass 1 have no node; nothing to reconcile.
    if (!node) continue;
    // Deduplicated list of everything that depends on this package.
    node.dependents = [...new Set(ranges.map((r) => r.dependent))];
    const allRanges = ranges.map((r) => r.range);
    const parsed = parseRegistrySpecifier$1(name);
    if (!parsed) continue;
    try {
      const versions = await fetchVersionList(parsed);
      if (!versions) continue;
      const availableVersions = versions.map((v) => v.version);
      const finalVersion = findHighestSatisfying(allRanges, availableVersions);
      if (!finalVersion) {
        // Irreconcilable constraints: record both the structured conflict
        // and a human-readable error.
        graph.conflicts.push({
          package: name,
          ranges: ranges.map((r) => ({
            dependent: r.dependent,
            range: r.range
          })),
          availableVersions
        });
        graph.errors.push({
          type: "no_satisfying_version",
          package: name,
          message: `No version of ${name} satisfies all requirements: ${allRanges.join(", ")}`
        });
        continue;
      }
      if (finalVersion !== node.version) {
        // The reconciled version differs from pass 1's pick: refresh the
        // node's download/integrity/deprecation/dependency info.
        // NOTE(review): node.dependencies is replaced but the queue walk
        // already happened in pass 1, so new transitive deps introduced
        // by the changed version are not re-resolved here — presumably
        // accepted as a known limitation; verify against callers.
        const versionData = await fetchVersionDetails(parsed, finalVersion);
        if (versionData) {
          node.version = finalVersion;
          node.downloadUrl = versionData.downloadUrl;
          node.integrity = `sha256-${Buffer.from(versionData.checksum, "hex").toString("base64")}`;
          node.deprecated = versionData.deprecationMessage ?? void 0;
          node.dependencies = versionData.manifest?.dependencies ?? {};
        }
      }
    } catch {
      // Best-effort reconciliation: on fetch failure the provisional
      // version from pass 1 is kept silently.
    }
  }
}
|
|
639
|
+
//#endregion
|
|
640
|
+
//#region src/lib/specifier.ts
/**
 * Unified registry specifier regex pattern.
 *
 * Matches:
 * - @user/{owner}/{name}[@version]
 * - @org/{owner}/{name}[@version]
 * - @github/{owner}/{repo}/{skillname}[@version]
 *
 * Groups: 1 namespace (user|org|github), 2 owner, 3 name,
 * 4 optional subname (github only), 5 optional version.
 */
const REGISTRY_SPECIFIER_PATTERN = /^@(user|org|github)\/([a-zA-Z0-9_-]+)\/([a-zA-Z0-9._-]+)(?:\/([a-z][a-z0-9-]*))?(?:@(.+))?$/;
/**
 * Parse a registry specifier string (any namespace).
 *
 * @param specifier - The specifier string
 * @returns Parsed specifier or null if invalid
 */
function parseRegistrySpecifier(specifier) {
  const match = REGISTRY_SPECIFIER_PATTERN.exec(specifier);
  if (match === null) return null;
  const namespace = match[1];
  const owner = match[2];
  const name = match[3];
  const subname = match[4];
  const versionRange = match[5];
  if (!owner || !name) return null;
  if (namespace === "github") {
    // github specifiers must name a skill within the repo.
    if (!subname) return null;
  } else if (subname) {
    // user/org specifiers have no fourth segment.
    return null;
  }
  return {
    namespace,
    owner,
    name,
    subname: subname || void 0,
    versionRange: versionRange || void 0
  };
}
|
|
681
|
+
/**
 * Generate a full registry identifier string from a parsed specifier.
 *
 * Produces "@{namespace}/{owner}/{name}[/{subname}][@{versionRange}]".
 */
function generateRegistryIdentifier(spec) {
  const segments = [`@${spec.namespace}`, spec.owner, spec.name];
  if (spec.subname) segments.push(spec.subname);
  const base = segments.join("/");
  return spec.versionRange ? `${base}@${spec.versionRange}` : base;
}
|
|
690
|
+
/**
 * Check if a string is a registry specifier (starts with @user/, @org/,
 * or @github/).
 */
function isRegistrySpecifier(specifier) {
  const prefixes = ["@user/", "@org/", "@github/"];
  return prefixes.some((prefix) => specifier.startsWith(prefix));
}
|
|
696
|
+
/**
 * GitHub specifier regex pattern.
 * Matches: github:{owner}/{repo}[/{path}][@{ref}]
 *
 * Groups: 1 owner, 2 repo, 3 optional /path (leading slash retained),
 * 4 optional ref after "@".
 */
const GITHUB_SPECIFIER_PATTERN = /^github:([a-zA-Z0-9_-]+)\/([a-zA-Z0-9_.-]+)(\/[^@]+)?(?:@(.+))?$/;
/**
 * Parse a GitHub specifier string.
 *
 * @param specifier - The specifier string (e.g., "github:owner/repo/path@ref")
 * @returns Parsed specifier or null if invalid
 *
 * @example
 * ```typescript
 * parseGitHubSpecifier("github:vercel-labs/agent-skills/skills/react@main")
 * // => { owner: "vercel-labs", repo: "agent-skills", path: "skills/react", ref: "main" }
 * ```
 */
function parseGitHubSpecifier(specifier) {
  const match = specifier.match(GITHUB_SPECIFIER_PATTERN);
  if (!match) return null;
  const owner = match[1];
  const repo = match[2];
  if (!owner || !repo) return null;
  const pathWithSlash = match[3];
  const ref = match[4];
  return {
    owner,
    repo,
    // Drop the leading slash the capture group keeps.
    path: pathWithSlash ? pathWithSlash.slice(1) : void 0,
    ref: ref || void 0
  };
}
|
|
733
|
+
/**
 * Format a GitHubSpecifier back to string format.
 *
 * Inverse of parseGitHubSpecifier: emits "github:{owner}/{repo}" with the
 * optional "/{path}" and "@{ref}" suffixes appended when present.
 *
 * @param spec - The GitHub specifier object
 * @returns Formatted string (e.g., "github:owner/repo/path@ref")
 */
function formatGitHubSpecifier(spec) {
  const pieces = [`github:${spec.owner}/${spec.repo}`];
  if (spec.path) pieces.push(`/${spec.path}`);
  if (spec.ref) pieces.push(`@${spec.ref}`);
  return pieces.join("");
}
|
|
745
|
+
/**
 * Extract skill name from GitHub specifier.
 * Uses the last segment of the path, or the repo name if no path.
 *
 * @param spec - The GitHub specifier object
 * @returns Skill name (e.g., "react-best-practices" or "prompts")
 *
 * @example
 * ```typescript
 * getGitHubSkillName({ owner: "vercel-labs", repo: "agent-skills", path: "skills/react" })
 * // => "react"
 *
 * getGitHubSkillName({ owner: "myorg", repo: "prompts" })
 * // => "prompts"
 * ```
 */
function getGitHubSkillName(spec) {
  // filter(Boolean) drops empty segments from stray/trailing slashes,
  // so the final segment (when any exist) is always non-empty.
  const segments = (spec.path ?? "").split("/").filter(Boolean);
  return segments.at(-1) ?? spec.repo;
}
|
|
769
|
+
/**
 * Check if a string is a GitHub specifier (github: prefix)
 *
 * @param specifier - Raw specifier string
 * @returns true when the string uses the "github:" scheme
 */
function isGitHubSpecifier(specifier) {
  const GITHUB_SCHEME = "github:";
  return specifier.startsWith(GITHUB_SCHEME);
}
|
|
775
|
+
/**
 * GitHub URL patterns
 *
 * Matches:
 * - https://github.com/owner/repo/tree/branch/path/to/skill
 * - https://github.com/owner/repo/tree/branch
 * - https://github.com/owner/repo
 * - https://github.com/owner/repo.git
 */
const GITHUB_URL_TREE_PATTERN = /^https?:\/\/github\.com\/([^/]+)\/([^/]+)\/tree\/([^/]+)(?:\/(.+))?$/;
const GITHUB_URL_PATTERN = /^https?:\/\/github\.com\/([^/]+)\/([^/]+?)(?:\.git)?\/?$/;
/**
 * GitHub shorthand pattern: owner/repo or owner/repo/path
 * Must not contain :, not start with . / or @
 */
const GITHUB_SHORTHAND_PATTERN = /^([a-zA-Z0-9_-]+)\/([a-zA-Z0-9_.-]+)(?:\/(.+))?$/;
/**
 * Check if a string is a GitHub URL (https://github.com/...)
 */
function isGitHubUrl(input) {
  return /^https?:\/\/github\.com\/[^/]+\/[^/]+/.test(input);
}
/**
 * Parse a GitHub URL into a GitHubSpecifier.
 *
 * Tries the /tree/{ref} form first (which may carry a subpath), then the
 * bare repository form. Returns null for anything else.
 *
 * @param input - A github.com URL
 * @returns `{ owner, repo, ref?, path? }` or null
 */
function parseGitHubUrl(input) {
  const treeMatch = GITHUB_URL_TREE_PATTERN.exec(input);
  if (treeMatch) {
    const [, owner, repo, ref, path] = treeMatch;
    if (!owner || !repo || !ref) return null;
    return {
      owner,
      repo,
      ref,
      path: path || void 0
    };
  }
  const repoMatch = GITHUB_URL_PATTERN.exec(input);
  if (!repoMatch) return null;
  const [, owner, repo] = repoMatch;
  if (!owner || !repo) return null;
  return { owner, repo };
}
/**
 * Characters/positions that disqualify a string from being a GitHub
 * shorthand: any scheme separator, or a leading ".", "/" or "@".
 */
function hasNonShorthandShape(input) {
  return input.includes(":") || input.startsWith(".") || input.startsWith("/") || input.startsWith("@");
}
/**
 * Check if a string is a GitHub shorthand (owner/repo or owner/repo/path).
 */
function isGitHubShorthand(input) {
  if (hasNonShorthandShape(input)) return false;
  return GITHUB_SHORTHAND_PATTERN.test(input);
}
/**
 * Parse a GitHub shorthand into a GitHubSpecifier.
 *
 * @param input - Shorthand like "owner/repo" or "owner/repo/some/path"
 * @returns `{ owner, repo, path? }` or null when the input is not shorthand
 */
function parseGitHubShorthand(input) {
  if (hasNonShorthandShape(input)) return null;
  const match = GITHUB_SHORTHAND_PATTERN.exec(input);
  if (!match) return null;
  const [, owner, repo, path] = match;
  if (!owner || !repo) return null;
  return {
    owner,
    repo,
    path: path || void 0
  };
}
|
|
845
|
+
//#endregion
|
|
846
|
+
//#region src/agents.ts
|
|
847
|
+
/**
 * Agent configuration for skill symlinks.
 *
 * Defines where different AI agents expect skills to be located.
 */
/**
 * Default agent configurations with display names.
 * These can be overridden in pspm.json under the "agents" key.
 *
 * Each entry maps an agent key to:
 * - displayName: human-readable name shown in prompts
 * - skillsDir: project-relative skills directory
 * - globalSkillsDir: skills directory relative to the user's home
 */
const AGENT_INFO = {
  adal: { displayName: "AdaL", skillsDir: ".adal/skills", globalSkillsDir: ".adal/skills" },
  amp: { displayName: "Amp", skillsDir: ".agents/skills", globalSkillsDir: ".config/agents/skills" },
  antigravity: { displayName: "Antigravity", skillsDir: ".agent/skills", globalSkillsDir: ".gemini/antigravity/skills" },
  augment: { displayName: "Augment", skillsDir: ".augment/skills", globalSkillsDir: ".augment/skills" },
  "claude-code": { displayName: "Claude Code", skillsDir: ".claude/skills", globalSkillsDir: ".claude/skills" },
  cline: { displayName: "Cline", skillsDir: ".agents/skills", globalSkillsDir: ".agents/skills" },
  codebuddy: { displayName: "CodeBuddy", skillsDir: ".codebuddy/skills", globalSkillsDir: ".codebuddy/skills" },
  codex: { displayName: "Codex", skillsDir: ".agents/skills", globalSkillsDir: ".codex/skills" },
  "command-code": { displayName: "Command Code", skillsDir: ".commandcode/skills", globalSkillsDir: ".commandcode/skills" },
  continue: { displayName: "Continue", skillsDir: ".continue/skills", globalSkillsDir: ".continue/skills" },
  cortex: { displayName: "Cortex Code", skillsDir: ".cortex/skills", globalSkillsDir: ".snowflake/cortex/skills" },
  crush: { displayName: "Crush", skillsDir: ".crush/skills", globalSkillsDir: ".config/crush/skills" },
  cursor: { displayName: "Cursor", skillsDir: ".agents/skills", globalSkillsDir: ".cursor/skills" },
  droid: { displayName: "Droid", skillsDir: ".factory/skills", globalSkillsDir: ".factory/skills" },
  "gemini-cli": { displayName: "Gemini CLI", skillsDir: ".agents/skills", globalSkillsDir: ".gemini/skills" },
  "github-copilot": { displayName: "GitHub Copilot", skillsDir: ".agents/skills", globalSkillsDir: ".copilot/skills" },
  goose: { displayName: "Goose", skillsDir: ".goose/skills", globalSkillsDir: ".config/goose/skills" },
  "iflow-cli": { displayName: "iFlow CLI", skillsDir: ".iflow/skills", globalSkillsDir: ".iflow/skills" },
  junie: { displayName: "Junie", skillsDir: ".junie/skills", globalSkillsDir: ".junie/skills" },
  kilo: { displayName: "Kilo Code", skillsDir: ".kilocode/skills", globalSkillsDir: ".kilocode/skills" },
  "kimi-cli": { displayName: "Kimi Code CLI", skillsDir: ".agents/skills", globalSkillsDir: ".config/agents/skills" },
  "kiro-cli": { displayName: "Kiro CLI", skillsDir: ".kiro/skills", globalSkillsDir: ".kiro/skills" },
  kode: { displayName: "Kode", skillsDir: ".kode/skills", globalSkillsDir: ".kode/skills" },
  mcpjam: { displayName: "MCPJam", skillsDir: ".mcpjam/skills", globalSkillsDir: ".mcpjam/skills" },
  "mistral-vibe": { displayName: "Mistral Vibe", skillsDir: ".vibe/skills", globalSkillsDir: ".vibe/skills" },
  mux: { displayName: "Mux", skillsDir: ".mux/skills", globalSkillsDir: ".mux/skills" },
  neovate: { displayName: "Neovate", skillsDir: ".neovate/skills", globalSkillsDir: ".neovate/skills" },
  openclaw: { displayName: "OpenClaw", skillsDir: "skills", globalSkillsDir: ".openclaw/skills" },
  opencode: { displayName: "OpenCode", skillsDir: ".agents/skills", globalSkillsDir: ".config/opencode/skills" },
  openhands: { displayName: "OpenHands", skillsDir: ".openhands/skills", globalSkillsDir: ".openhands/skills" },
  pi: { displayName: "Pi", skillsDir: ".pi/skills", globalSkillsDir: ".pi/agent/skills" },
  pochi: { displayName: "Pochi", skillsDir: ".pochi/skills", globalSkillsDir: ".pochi/skills" },
  qoder: { displayName: "Qoder", skillsDir: ".qoder/skills", globalSkillsDir: ".qoder/skills" },
  "qwen-code": { displayName: "Qwen Code", skillsDir: ".qwen/skills", globalSkillsDir: ".qwen/skills" },
  replit: { displayName: "Replit", skillsDir: ".agents/skills", globalSkillsDir: ".config/agents/skills" },
  roo: { displayName: "Roo Code", skillsDir: ".roo/skills", globalSkillsDir: ".roo/skills" },
  trae: { displayName: "Trae", skillsDir: ".trae/skills", globalSkillsDir: ".trae/skills" },
  "trae-cn": { displayName: "Trae CN", skillsDir: ".trae/skills", globalSkillsDir: ".trae-cn/skills" },
  universal: { displayName: "Universal", skillsDir: ".agents/skills", globalSkillsDir: ".config/agents/skills" },
  windsurf: { displayName: "Windsurf", skillsDir: ".windsurf/skills", globalSkillsDir: ".codeium/windsurf/skills" },
  zencoder: { displayName: "Zencoder", skillsDir: ".zencoder/skills", globalSkillsDir: ".zencoder/skills" }
};
|
|
1063
|
+
/**
 * Default agent configurations (AgentConfig format).
 * Derived from AGENT_INFO: one { skillsDir } entry per built-in agent.
 */
const DEFAULT_AGENT_CONFIGS = {};
for (const [agentName, info] of Object.entries(AGENT_INFO)) DEFAULT_AGENT_CONFIGS[agentName] = { skillsDir: info.skillsDir };
/**
 * All built-in agent names in display order.
 */
const ALL_AGENTS = Object.keys(AGENT_INFO).sort();
|
|
1071
|
+
/**
 * Resolve agent configuration by name.
 *
 * @param name - Agent name (built-in or custom)
 * @param overrides - Custom agent configurations from pspm.json
 * @param global - If true, return global paths instead of project paths
 * @returns Agent configuration or null if not found
 *
 * @example
 * ```typescript
 * resolveAgentConfig("claude-code")
 * // => { skillsDir: ".claude/skills" }
 *
 * resolveAgentConfig("claude-code", undefined, true)
 * // => { skillsDir: ".claude/skills" } (global path, used relative to ~)
 *
 * resolveAgentConfig("my-custom", { "my-custom": { skillsDir: ".myagent/prompts" } })
 * // => { skillsDir: ".myagent/prompts" }
 * ```
 */
function resolveAgentConfig(name, overrides, global) {
  // Overrides only apply in project mode; global installs always use built-ins.
  const override = global ? void 0 : overrides?.[name];
  if (override) return override;
  if (!(name in AGENT_INFO)) return null;
  const info = AGENT_INFO[name];
  const dir = global ? info.globalSkillsDir : info.skillsDir;
  return { skillsDir: dir };
}
|
|
1099
|
+
/**
 * Parse comma-separated agent names from CLI argument.
 *
 * @param agentArg - Comma-separated agent names (e.g., "claude-code,cursor")
 * @returns Array of agent names, or ["none"] if skipping symlinks
 *
 * @example
 * ```typescript
 * parseAgentArg("claude-code,cursor")
 * // => ["claude-code", "cursor"]
 *
 * parseAgentArg("none")
 * // => ["none"]
 *
 * parseAgentArg(undefined)
 * // => [...ALL_AGENTS]
 * ```
 */
function parseAgentArg(agentArg) {
  // No argument (or empty string) means "all built-in agents"; return a copy
  // so callers can't mutate the shared ALL_AGENTS array.
  if (!agentArg) return ALL_AGENTS.slice();
  if (agentArg === "none") return ["none"];
  const names = agentArg.split(",").map((name) => name.trim());
  return names.filter((name) => name.length > 0);
}
|
|
1122
|
+
/**
 * Get all available agent names (built-in + custom).
 *
 * @param overrides - Custom agent configurations from pspm.json (optional)
 * @returns De-duplicated list: built-in names first, then custom names
 */
function getAvailableAgents(overrides) {
  const names = new Set(Object.keys(DEFAULT_AGENT_CONFIGS));
  for (const key of Object.keys(overrides ?? {})) names.add(key);
  return Array.from(names);
}
|
|
1130
|
+
/**
 * Prompt user to select which agents to install skills to.
 *
 * Presents every built-in agent pre-checked; an empty selection is treated
 * as "skip symlinks" (["none"]).
 *
 * @returns Array of selected agent names
 */
async function promptForAgents() {
  const choices = ALL_AGENTS.map((agent) => {
    const info = AGENT_INFO[agent];
    return {
      name: `${info.displayName} (${info.skillsDir})`,
      value: agent,
      checked: true
    };
  });
  const selected = await checkbox({
    message: "Select agents to install skills to",
    choices
  });
  return selected.length === 0 ? ["none"] : selected;
}
|
|
1147
|
+
//#endregion
|
|
1148
|
+
//#region src/manifest.ts
|
|
1149
|
+
/**
 * Get the manifest file path
 * Global: ~/.pspm/pspm.json
 * Project: ./pspm.json
 */
function getManifestPath() {
  const fileName = "pspm.json";
  return isGlobalMode() ? join(homedir(), ".pspm", fileName) : join(process.cwd(), fileName);
}
|
|
1158
|
+
/**
 * Read the manifest file (pspm.json)
 * Returns null if file doesn't exist or cannot be parsed as JSON.
 */
async function readManifest() {
  const manifestPath = getManifestPath();
  try {
    const raw = await readFile(manifestPath, "utf-8");
    return JSON.parse(raw);
  } catch {
    return null;
  }
}
|
|
1170
|
+
/**
 * Write the manifest file (pspm.json)
 * Serializes with 2-space indentation and a trailing newline.
 */
async function writeManifest(manifest) {
  const serialized = `${JSON.stringify(manifest, null, 2)}\n`;
  await writeFile(getManifestPath(), serialized);
}
|
|
1177
|
+
/**
 * Create a minimal manifest with just dependencies
 * Similar to how npm creates package.json with just dependencies when you run `npm add`
 * This is for consuming packages, not publishing - so only dependencies are needed
 */
async function createMinimalManifest() {
  const manifest = { dependencies: {} };
  return manifest;
}
|
|
1185
|
+
/**
 * Ensure manifest exists, creating a minimal one if needed
 * Returns the manifest (existing or newly created)
 */
async function ensureManifest() {
  const existing = await readManifest();
  if (existing) return existing;
  const fresh = await createMinimalManifest();
  await writeManifest(fresh);
  return fresh;
}
|
|
1197
|
+
/**
 * Add a dependency to the manifest
 * Creates the manifest if it doesn't exist
 *
 * @param skillName - Full skill name (e.g., "@user/alice/my-skill")
 * @param versionRange - Version range to save (e.g., "^1.0.0")
 */
async function addDependency(skillName, versionRange) {
  const manifest = await ensureManifest();
  const deps = manifest.dependencies || (manifest.dependencies = {});
  deps[skillName] = versionRange;
  await writeManifest(manifest);
}
|
|
1210
|
+
/**
 * Remove a dependency from the manifest
 *
 * @param skillName - Full skill name (e.g., "@user/alice/my-skill")
 * @returns true if dependency was removed, false if it didn't exist
 */
async function removeDependency(skillName) {
  const manifest = await readManifest();
  const deps = manifest?.dependencies;
  if (!deps?.[skillName]) return false;
  delete deps[skillName];
  await writeManifest(manifest);
  return true;
}
|
|
1223
|
+
/**
 * Get all dependencies from the manifest
 * Returns empty object if manifest doesn't exist or has no dependencies
 */
async function getDependencies() {
  const manifest = await readManifest();
  return manifest?.dependencies ?? {};
}
|
|
1230
|
+
/**
 * Get all GitHub dependencies from the manifest
 * Returns empty object if manifest doesn't exist or has no GitHub dependencies
 */
async function getGitHubDependencies() {
  const manifest = await readManifest();
  return manifest?.githubDependencies ?? {};
}
|
|
1237
|
+
/**
 * Add a GitHub dependency to the manifest
 * Creates the manifest if it doesn't exist
 *
 * @param specifier - GitHub specifier (e.g., "github:owner/repo/path")
 * @param ref - Git ref (branch, tag, or "latest")
 */
async function addGitHubDependency(specifier, ref) {
  const manifest = await ensureManifest();
  const deps = manifest.githubDependencies || (manifest.githubDependencies = {});
  deps[specifier] = ref;
  await writeManifest(manifest);
}
|
|
1250
|
+
/**
 * Remove a GitHub dependency from the manifest
 *
 * @param specifier - GitHub specifier (e.g., "github:owner/repo/path")
 * @returns true if dependency was removed, false if it didn't exist
 */
async function removeGitHubDependency(specifier) {
  const manifest = await readManifest();
  const deps = manifest?.githubDependencies;
  if (!deps?.[specifier]) return false;
  delete deps[specifier];
  await writeManifest(manifest);
  return true;
}
|
|
1263
|
+
/**
 * Add a local dependency to the manifest
 * Creates the manifest if it doesn't exist
 *
 * @param specifier - Local specifier (e.g., "file:../my-skill")
 * @param version - Always "*" for local packages
 */
async function addLocalDependency(specifier, version = "*") {
  const manifest = await ensureManifest();
  const locals = manifest.localDependencies || (manifest.localDependencies = {});
  locals[specifier] = version;
  await writeManifest(manifest);
}
|
|
1276
|
+
/**
 * Add a well-known dependency to the manifest
 *
 * If the base URL already has an array of skills, the new names are merged
 * in (de-duplicated); otherwise the entry is replaced with skillNames.
 *
 * @param baseUrl - The well-known base URL (e.g., "https://acme.com")
 * @param skillNames - Skill names to add (e.g., ["code-review"])
 */
async function addWellKnownDependency(baseUrl, skillNames) {
  const manifest = await ensureManifest();
  if (!manifest.wellKnownDependencies) manifest.wellKnownDependencies = {};
  const current = manifest.wellKnownDependencies[baseUrl];
  manifest.wellKnownDependencies[baseUrl] = Array.isArray(current) ? [...new Set([...current, ...skillNames])] : skillNames;
  await writeManifest(manifest);
}
|
|
1292
|
+
//#endregion
|
|
1293
|
+
//#region src/github.ts
|
|
1294
|
+
/**
 * GitHub package download and extraction support.
 *
 * Downloads skill packages from GitHub repositories and extracts them
 * to .pspm/skills/_github/{owner}/{repo}/{path}/
 */
/**
 * Error thrown when GitHub API rate limit is hit.
 */
var GitHubRateLimitError = class extends Error {
  constructor() {
    const message = "GitHub API rate limit exceeded. Set GITHUB_TOKEN environment variable for higher limits.";
    super(message);
    this.name = "GitHubRateLimitError";
  }
};
|
|
1309
|
+
/**
 * Error thrown when GitHub repository/ref is not found.
 * Message includes the optional subpath and ref when present on the spec.
 */
var GitHubNotFoundError = class extends Error {
  constructor(spec) {
    const pathPart = spec.path ? `/${spec.path}` : "";
    const refPart = spec.ref ? `@${spec.ref}` : "";
    super(`GitHub repository not found: ${spec.owner}/${spec.repo}${pathPart}${refPart}`);
    this.name = "GitHubNotFoundError";
  }
};
|
|
1320
|
+
/**
 * Error thrown when the specified path doesn't exist in the repository.
 * When the caller supplies the repository's top-level directories, they are
 * appended to the message to help the user pick a valid path.
 */
var GitHubPathNotFoundError = class extends Error {
  constructor(spec, availablePaths) {
    let pathInfo = "";
    if (availablePaths?.length) pathInfo = `\nAvailable paths in repository root:\n ${availablePaths.join("\n ")}`;
    super(`Path "${spec.path}" not found in ${spec.owner}/${spec.repo}${pathInfo}`);
    this.name = "GitHubPathNotFoundError";
  }
};
|
|
1330
|
+
/**
 * Get GitHub API headers, including authentication if available.
 * Reads GITHUB_TOKEN from the environment; when set, adds a Bearer
 * Authorization header for higher rate limits.
 */
function getGitHubHeaders() {
  const token = process.env.GITHUB_TOKEN;
  const headers = {
    Accept: "application/vnd.github+json",
    "X-GitHub-Api-Version": "2022-11-28",
    "User-Agent": "pspm-cli"
  };
  if (token) headers.Authorization = `Bearer ${token}`;
  return headers;
}
|
|
1343
|
+
/**
 * Resolve a Git ref (branch/tag) to a commit SHA.
 *
 * When no ref (or "latest") is given, the repository's default branch is
 * looked up first, then resolved to the tip commit SHA.
 *
 * @param owner - Repository owner
 * @param repo - Repository name
 * @param ref - Branch, tag, or commit SHA (defaults to default branch)
 * @returns Resolved commit SHA
 * @throws GitHubNotFoundError on 404, GitHubRateLimitError on exhausted rate limit
 */
async function resolveGitHubRef(owner, repo, ref) {
  const headers = getGitHubHeaders();
  let targetRef = ref;
  if (!targetRef || targetRef === "latest") {
    const repoResponse = await fetch(`https://api.github.com/repos/${owner}/${repo}`, { headers });
    if (repoResponse.status === 404) throw new GitHubNotFoundError({ owner, repo });
    // 403 with zero remaining quota means rate-limited; other 403s fall through.
    if (repoResponse.status === 403 && repoResponse.headers.get("x-ratelimit-remaining") === "0") throw new GitHubRateLimitError();
    if (!repoResponse.ok) throw new Error(`GitHub API error: ${repoResponse.status}`);
    const repoInfo = await repoResponse.json();
    targetRef = repoInfo.default_branch;
  }
  const commitResponse = await fetch(`https://api.github.com/repos/${owner}/${repo}/commits/${targetRef}`, { headers });
  if (commitResponse.status === 404) throw new GitHubNotFoundError({ owner, repo, ref });
  if (commitResponse.status === 403 && commitResponse.headers.get("x-ratelimit-remaining") === "0") throw new GitHubRateLimitError();
  if (!commitResponse.ok) throw new Error(`GitHub API error: ${commitResponse.status}`);
  const commitInfo = await commitResponse.json();
  return commitInfo.sha;
}
|
|
1380
|
+
/**
 * Download a GitHub repository tarball.
 *
 * Resolves the spec's ref to a commit SHA first, then fetches the tarball
 * for that exact commit so the download is reproducible.
 *
 * @param spec - GitHub specifier with owner, repo, and optional ref
 * @returns Download result with buffer, commit SHA, and integrity hash
 * @throws GitHubNotFoundError on 404, GitHubRateLimitError on exhausted rate limit
 */
async function downloadGitHubPackage(spec) {
  const headers = getGitHubHeaders();
  const commit = await resolveGitHubRef(spec.owner, spec.repo, spec.ref);
  const response = await fetch(`https://api.github.com/repos/${spec.owner}/${spec.repo}/tarball/${commit}`, {
    headers,
    redirect: "follow"
  });
  if (response.status === 404) throw new GitHubNotFoundError(spec);
  if (response.status === 403 && response.headers.get("x-ratelimit-remaining") === "0") throw new GitHubRateLimitError();
  if (!response.ok) throw new Error(`Failed to download GitHub tarball: ${response.status}`);
  const buffer = Buffer.from(await response.arrayBuffer());
  return {
    buffer,
    commit,
    integrity: calculateIntegrity(buffer)
  };
}
|
|
1406
|
+
/**
 * Extract a GitHub package to the skills directory.
 *
 * For subpath specifiers, extracts only the specified subdirectory.
 * Full path structure is preserved under .pspm/skills/_github/.
 *
 * @param spec - GitHub specifier
 * @param buffer - Downloaded tarball buffer
 * @param skillsDir - Base skills directory (.pspm/skills)
 * @returns Path to extracted skill (relative to project root)
 * @throws GitHubPathNotFoundError when spec.path does not exist in the tarball
 */
async function extractGitHubPackage(spec, buffer, skillsDir) {
  const githubRoot = join(skillsDir, "_github");
  const destPath = spec.path ? join(githubRoot, spec.owner, spec.repo, spec.path) : join(githubRoot, spec.owner, spec.repo);
  // Random suffix prevents collisions when two extractions start in the same millisecond.
  const tempDir = join(githubRoot, ".temp", `${Date.now()}-${randomBytes(4).toString("hex")}`);
  await mkdir(tempDir, { recursive: true });
  const tempFile = join(tempDir, "archive.tgz");
  try {
    await writeFile(tempFile, buffer);
    // Use execFile with an argument array (not exec with an interpolated
    // command string) so paths containing quotes, spaces, `$`, or other
    // shell metacharacters cannot break the command or inject shell code.
    const { execFile } = await import("node:child_process");
    const { promisify } = await import("node:util");
    await promisify(execFile)("tar", ["-xzf", tempFile, "-C", tempDir]);
    // GitHub tarballs wrap everything in a single top-level directory.
    const extractedDir = (await readdir(tempDir)).find((e) => e !== "archive.tgz" && !e.startsWith("."));
    if (!extractedDir) throw new Error("Failed to find extracted directory in tarball");
    const sourcePath = join(tempDir, extractedDir);
    const copySource = spec.path ? join(sourcePath, spec.path) : sourcePath;
    if (spec.path && !await lstat(copySource).catch(() => null)) {
      // Requested subpath is missing: collect top-level directories to show the user.
      const rootEntries = await readdir(sourcePath);
      const dirs = [];
      for (const entry of rootEntries) if ((await lstat(join(sourcePath, entry)).catch(() => null))?.isDirectory() && !entry.startsWith(".")) dirs.push(entry);
      throw new GitHubPathNotFoundError(spec, dirs);
    }
    // Replace any previous extraction wholesale so stale files don't linger.
    await rm(destPath, {
      recursive: true,
      force: true
    });
    await mkdir(destPath, { recursive: true });
    await cp(copySource, destPath, { recursive: true });
    return spec.path ? `.pspm/skills/_github/${spec.owner}/${spec.repo}/${spec.path}` : `.pspm/skills/_github/${spec.owner}/${spec.repo}`;
  } finally {
    // Always clean up the temp dir, even when extraction fails.
    await rm(tempDir, {
      recursive: true,
      force: true
    });
  }
}
|
|
1453
|
+
/**
 * Get a short display name for a GitHub package.
 *
 * @param spec - GitHub specifier
 * @param commit - Resolved commit SHA (first 7 chars will be shown)
 * @returns Display string like "github:owner/repo/path (ref@abc1234)"
 */
function getGitHubDisplayName(spec, commit) {
  const parts = [`github:${spec.owner}/${spec.repo}`];
  if (spec.path) parts.push(`/${spec.path}`);
  if (spec.ref || commit) {
    const refLabel = spec.ref || "HEAD";
    const commitSuffix = commit ? `@${commit.slice(0, 7)}` : "";
    parts.push(` (${refLabel}${commitSuffix})`);
  }
  return parts.join("");
}
|
|
1470
|
+
//#endregion
|
|
1471
|
+
//#region src/lockfile.ts
|
|
1472
|
+
/**
 * Check if legacy lockfile exists (skill-lock.json)
 *
 * @returns true when the legacy lockfile is present on disk
 */
async function hasLegacyLockfile() {
  return stat(getLegacyLockfilePath()).then(() => true, () => false);
}
|
|
1483
|
+
/**
 * Migrate legacy lockfile (skill-lock.json) to new format (pspm-lock.json)
 * Returns true if migration was performed
 *
 * No-op when the legacy file is absent or the new file already exists;
 * any read/parse/write failure is swallowed and reported as "not migrated".
 */
async function migrateLockfileIfNeeded() {
  const legacyPath = getLegacyLockfilePath();
  const newPath = getLockfilePath();
  const legacyExists = await stat(legacyPath).then(() => true, () => false);
  if (!legacyExists) return false;
  const newExists = await stat(newPath).then(() => true, () => false);
  if (newExists) return false;
  try {
    const oldLockfile = JSON.parse(await readFile(legacyPath, "utf-8"));
    const newLockfile = {
      lockfileVersion: 2,
      registryUrl: oldLockfile.registryUrl,
      packages: oldLockfile.skills ?? {}
    };
    await writeFile(newPath, `${JSON.stringify(newLockfile, null, 2)}\n`);
    console.log("Migrated lockfile: skill-lock.json → pspm-lock.json");
    return true;
  } catch {
    return false;
  }
}
|
|
1514
|
+
/**
* Read the lockfile, automatically checking for legacy format.
*
* Resolution order:
*   1. Read pspm-lock.json; upgrade a v1 shape (`skills` key) to v2 in memory.
*   2. If that fails for any reason (missing file, bad JSON), fall back to the
*      legacy skill-lock.json and convert it to the v2 shape in memory.
*   3. Return null when neither file yields a usable lockfile.
*
* Note: this never writes to disk — conversions are in-memory only.
*/
async function readLockfile() {
	const lockfilePath = getLockfilePath();
	try {
		const content = await readFile(lockfilePath, "utf-8");
		const lockfile = JSON.parse(content);
		// v1 stored entries under `skills`; expose them as `packages` (v2)
		// while keeping the original fields via spread.
		if (lockfile.lockfileVersion === 1 && lockfile.skills && !lockfile.packages) return {
			...lockfile,
			lockfileVersion: 2,
			packages: lockfile.skills
		};
		return lockfile;
	} catch {
		// Primary lockfile unavailable — try the legacy file, if present.
		if (await hasLegacyLockfile()) try {
			const content = await readFile(getLegacyLockfilePath(), "utf-8");
			const legacyLockfile = JSON.parse(content);
			return {
				lockfileVersion: 2,
				registryUrl: legacyLockfile.registryUrl,
				packages: legacyLockfile.skills ?? {}
			};
		} catch {
			// Legacy file unreadable/corrupt.
			return null;
		}
		// No legacy file either.
		return null;
	}
}
|
|
1543
|
+
/**
* Write the lockfile (v4 format if any package has dependencies, otherwise v3).
*
* Normalizes the in-memory lockfile before writing: accepts either the
* `packages` (v2+) or legacy `skills` key, stamps the `$schema` URL, and only
* serializes the optional sections (githubPackages, localPackages,
* wellKnownPackages) when they are non-empty.
*
* @param lockfile - In-memory lockfile object to persist
*/
async function writeLockfile(lockfile) {
	const outputPath = getLockfilePath();
	await mkdir(dirname(outputPath), { recursive: true });
	const packages = lockfile.packages ?? lockfile.skills ?? {};
	// v4 is only needed when at least one entry carries a dependency map.
	const hasDeps = Object.values(packages).some(
		(pkg) => pkg.dependencies && Object.keys(pkg.dependencies).length > 0
	);
	const normalized = {
		$schema: PSPM_LOCKFILE_SCHEMA_URL,
		lockfileVersion: hasDeps ? 4 : 3,
		registryUrl: lockfile.registryUrl,
		packages
	};
	// Optional sections are omitted entirely when empty to keep the file lean.
	for (const section of ["githubPackages", "localPackages", "wellKnownPackages"]) {
		const entries = lockfile[section];
		if (entries && Object.keys(entries).length > 0) normalized[section] = entries;
	}
	await writeFile(outputPath, `${JSON.stringify(normalized, null, 2)}\n`);
}
|
|
1561
|
+
/**
* Create a new empty lockfile (v4 format).
*
* @returns A fresh in-memory lockfile with the configured registry URL
*/
async function createEmptyLockfile() {
	const registryUrl = await getRegistryUrl();
	return {
		lockfileVersion: 4,
		registryUrl,
		packages: {}
	};
}
|
|
1571
|
+
/**
* Get packages from lockfile (handles both v1 and v2).
*
* @param lockfile - Lockfile object; may use the modern `packages` key or the
*   legacy v1 `skills` key
* @returns The package map (the live object, not a copy), or {} when absent
*/
function getPackages(lockfile) {
	// Mirror `??` semantics: only null/undefined fall through.
	if (lockfile.packages != null) return lockfile.packages;
	if (lockfile.skills != null) return lockfile.skills;
	return {};
}
|
|
1577
|
+
/**
* Add a skill to the lockfile.
*
* Creates an empty lockfile first when none exists; overwrites any existing
* entry with the same name.
*
* @param fullName - Fully-qualified skill name (lockfile key)
* @param entry - Lockfile entry for the skill
*/
async function addToLockfile(fullName, entry) {
	const lockfile = (await readLockfile()) ?? (await createEmptyLockfile());
	const packages = getPackages(lockfile);
	packages[fullName] = entry;
	// Re-assign under `packages` so a legacy `skills`-keyed file is upgraded.
	lockfile.packages = packages;
	await writeLockfile(lockfile);
}
|
|
1588
|
+
/**
* Add a skill to the lockfile with dependencies (v4 format).
*
* @param fullName - Fully-qualified skill name (lockfile key)
* @param entry - Lockfile entry for the skill (copied, not mutated)
* @param dependencies - Optional dependency map; attached only when non-empty
*/
async function addToLockfileWithDeps(fullName, entry, dependencies) {
	const lockfile = (await readLockfile()) ?? (await createEmptyLockfile());
	// Copy so the caller's entry object is never mutated.
	const record = { ...entry };
	const hasDependencies = dependencies && Object.keys(dependencies).length > 0;
	if (hasDependencies) record.dependencies = dependencies;
	const packages = getPackages(lockfile);
	packages[fullName] = record;
	lockfile.packages = packages;
	await writeLockfile(lockfile);
}
|
|
1601
|
+
/**
* Remove a skill from the lockfile.
*
* @param fullName - Fully-qualified skill name (lockfile key)
* @returns true when an entry was removed, false when the lockfile or the
*   entry does not exist
*/
async function removeFromLockfile(fullName) {
	const lockfile = await readLockfile();
	if (!lockfile) return false;
	const packages = getPackages(lockfile);
	const existing = packages[fullName];
	if (!existing) return false;
	delete packages[fullName];
	lockfile.packages = packages;
	await writeLockfile(lockfile);
	return true;
}
|
|
1614
|
+
/**
* List all skills in the lockfile.
*
* @returns Array of { name, entry } pairs; empty when no lockfile exists
*/
async function listLockfileSkills() {
	const lockfile = await readLockfile();
	if (!lockfile) return [];
	const packages = getPackages(lockfile);
	const result = [];
	for (const [name, entry] of Object.entries(packages)) {
		result.push({ name, entry });
	}
	return result;
}
|
|
1626
|
+
/**
* Add a GitHub package to the lockfile.
*
* @param specifier - GitHub specifier string (lockfile key)
* @param entry - Lockfile entry for the GitHub package
*/
async function addGitHubToLockfile(specifier, entry) {
	const lockfile = (await readLockfile()) ?? (await createEmptyLockfile());
	// Lazily create the section so empty lockfiles stay minimal.
	const bucket = lockfile.githubPackages || {};
	bucket[specifier] = entry;
	lockfile.githubPackages = bucket;
	await writeLockfile(lockfile);
}
|
|
1636
|
+
/**
* Remove a GitHub package from the lockfile.
*
* @param specifier - GitHub specifier string (lockfile key)
* @returns true when an entry was removed, false otherwise
*/
async function removeGitHubFromLockfile(specifier) {
	const lockfile = await readLockfile();
	const existing = lockfile?.githubPackages?.[specifier];
	if (!existing) return false;
	delete lockfile.githubPackages[specifier];
	await writeLockfile(lockfile);
	return true;
}
|
|
1646
|
+
/**
* List all GitHub packages in the lockfile.
*
* @returns Array of { specifier, entry } pairs; empty when absent
*/
async function listLockfileGitHubPackages() {
	const lockfile = await readLockfile();
	const githubPackages = lockfile?.githubPackages;
	if (!githubPackages) return [];
	return Object.entries(githubPackages).map(([specifier, entry]) => ({
		specifier,
		entry
	}));
}
|
|
1657
|
+
/**
* Add a local package to the lockfile.
*
* @param specifier - Local package specifier (lockfile key)
* @param entry - Lockfile entry for the local package
*/
async function addLocalToLockfile(specifier, entry) {
	const lockfile = (await readLockfile()) ?? (await createEmptyLockfile());
	// Lazily create the section so empty lockfiles stay minimal.
	const bucket = lockfile.localPackages || {};
	bucket[specifier] = entry;
	lockfile.localPackages = bucket;
	await writeLockfile(lockfile);
}
|
|
1667
|
+
/**
* Add a well-known package to the lockfile.
*
* @param specifier - Well-known package specifier (lockfile key)
* @param entry - Lockfile entry for the well-known package
*/
async function addWellKnownToLockfile(specifier, entry) {
	const lockfile = (await readLockfile()) ?? (await createEmptyLockfile());
	// Lazily create the section so empty lockfiles stay minimal.
	const bucket = lockfile.wellKnownPackages || {};
	bucket[specifier] = entry;
	lockfile.wellKnownPackages = bucket;
	await writeLockfile(lockfile);
}
|
|
1677
|
+
/**
* List all well-known packages in the lockfile.
*
* @returns Array of { specifier, entry } pairs; empty when absent
*/
async function listLockfileWellKnownPackages() {
	const lockfile = await readLockfile();
	const wellKnown = lockfile?.wellKnownPackages;
	if (!wellKnown) return [];
	return Object.entries(wellKnown).map(([specifier, entry]) => ({
		specifier,
		entry
	}));
}
|
|
1688
|
+
//#endregion
|
|
1689
|
+
//#region src/symlinks.ts
|
|
1690
|
+
/**
|
|
1691
|
+
* Symlink management for agent skill directories.
|
|
1692
|
+
*
|
|
1693
|
+
* Creates relative symlinks from agent-specific directories (e.g., .claude/skills/)
|
|
1694
|
+
* to the central .pspm/skills/ directory for portability.
|
|
1695
|
+
*/
|
|
1696
|
+
/**
* Create symlinks for all skills to specified agent directories.
*
* Symlinks are relative (computed against each link's parent directory) so
* the project stays portable when moved. Unknown agents are warned about and
* skipped; the sentinel agent list ["none"] disables linking entirely.
*
* @param skills - List of skills ({ name, sourcePath }) to create symlinks for
* @param options - Symlink creation options ({ agents, projectRoot, agentConfigs, global? })
*/
async function createAgentSymlinks(skills, options) {
	const { agents, projectRoot, agentConfigs } = options;
	const linkingDisabled = agents.length === 1 && agents[0] === "none";
	if (linkingDisabled) return;
	for (const agentName of agents) {
		const agentConfig = resolveAgentConfig(agentName, agentConfigs, options.global);
		if (!agentConfig) {
			console.warn(`Warning: Unknown agent "${agentName}", skipping symlinks`);
			continue;
		}
		const linkDir = join(projectRoot, agentConfig.skillsDir);
		await mkdir(linkDir, { recursive: true });
		for (const skill of skills) {
			const linkPath = join(linkDir, skill.name);
			const absoluteTarget = join(projectRoot, skill.sourcePath);
			// Relative target keeps links valid if the project root moves.
			const relativeTarget = relative(dirname(linkPath), absoluteTarget);
			await createSymlink(linkPath, relativeTarget, skill.name);
		}
	}
}
|
|
1720
|
+
/**
* Create a single symlink, handling existing files/symlinks.
*
* - Existing symlink already pointing at `target`: no-op.
* - Existing symlink pointing elsewhere: replaced.
* - Existing regular file/directory: warned about and left untouched.
* - Any failure is downgraded to a warning (best-effort semantics).
*
* @param symlinkPath - Absolute path where symlink will be created
* @param target - Relative path to target (relative to symlink's parent dir)
* @param skillName - Name for logging
*/
async function createSymlink(symlinkPath, target, skillName) {
	try {
		const existing = await lstat(symlinkPath).catch(() => null);
		if (existing !== null) {
			if (!existing.isSymbolicLink()) {
				// Never clobber a real file or directory the user may own.
				console.warn(`Warning: File exists at symlink path for "${skillName}", skipping: ${symlinkPath}`);
				return;
			}
			const currentTarget = await readlink(symlinkPath);
			if (currentTarget === target) return;
			await rm(symlinkPath);
		}
		await symlink(target, symlinkPath);
	} catch (error) {
		const message = error instanceof Error ? error.message : String(error);
		console.warn(`Warning: Failed to create symlink for "${skillName}": ${message}`);
	}
}
|
|
1743
|
+
/**
* Remove symlinks for a skill from all agent directories.
*
* Only paths that are actually symlinks are removed; regular files are left
* alone. Errors are swallowed (best-effort cleanup). The sentinel agent list
* ["none"] disables the operation.
*
* NOTE(review): unlike createAgentSymlinks, this does not pass options.global
* to resolveAgentConfig — confirm whether global-mode dirs need cleanup too.
*
* @param skillName - Name of the skill (symlink name)
* @param options - Symlink options ({ agents, projectRoot, agentConfigs })
*/
async function removeAgentSymlinks(skillName, options) {
	const { agents, projectRoot, agentConfigs } = options;
	const linkingDisabled = agents.length === 1 && agents[0] === "none";
	if (linkingDisabled) return;
	for (const agentName of agents) {
		const agentConfig = resolveAgentConfig(agentName, agentConfigs);
		if (!agentConfig) continue;
		const linkPath = join(projectRoot, agentConfig.skillsDir, skillName);
		try {
			const stats = await lstat(linkPath).catch(() => null);
			if (stats?.isSymbolicLink()) await rm(linkPath);
		} catch {}
	}
}
|
|
1761
|
+
/**
* Get the source path for a registry skill within .pspm/skills/.
*
* Supports two call shapes:
*   - (owner, skillName): plain registry skill
*   - (namespace, owner, skillName): namespaced skill, where namespace "org"
*     and "github" map to dedicated subtrees and anything else falls back to
*     the plain owner path
*
* @param ownerOrNamespace - Skill author username, or namespace prefix
* @param skillNameOrOwner - Skill name (2-arg form) or owner (3-arg form)
* @param skillName - Skill name (3-arg form only)
* @returns Relative path from project root (e.g., ".pspm/skills/alice/my-skill")
*/
function getRegistrySkillPath(ownerOrNamespace, skillNameOrOwner, skillName) {
	// 2-arg form: first argument is the owner, second the skill name.
	if (skillName === void 0) {
		return `.pspm/skills/${ownerOrNamespace}/${skillNameOrOwner}`;
	}
	const namespace = ownerOrNamespace;
	const owner = skillNameOrOwner;
	switch (namespace) {
		case "org":
			return `.pspm/skills/_org/${owner}/${skillName}`;
		case "github":
			return `.pspm/skills/_github-registry/${owner}/${skillName}`;
		default:
			return `.pspm/skills/${owner}/${skillName}`;
	}
}
|
|
1779
|
+
/**
* Get the source path for a GitHub skill within .pspm/skills/.
*
* @param owner - GitHub repository owner
* @param repo - GitHub repository name
* @param path - Optional path within the repository
* @returns Relative path from project root (e.g., ".pspm/skills/_github/owner/repo/path")
*/
function getGitHubSkillPath(owner, repo, path) {
	const base = `.pspm/skills/_github/${owner}/${repo}`;
	return path ? `${base}/${path}` : base;
}
|
|
1791
|
+
/**
* Get the source path for a local skill within .pspm/skills/.
*
* @param skillName - Skill name
* @returns Relative path from project root (e.g., ".pspm/skills/_local/my-skill")
*/
function getLocalSkillPath(skillName) {
	return [".pspm", "skills", "_local", skillName].join("/");
}
|
|
1800
|
+
/**
* Get the source path for a well-known skill within .pspm/skills/.
*
* @param hostname - Source hostname (e.g., "acme.com")
* @param skillName - Skill name
* @returns Relative path from project root (e.g., ".pspm/skills/_wellknown/acme.com/my-skill")
*/
function getWellKnownSkillPath(hostname, skillName) {
	return [".pspm", "skills", "_wellknown", hostname, skillName].join("/");
}
|
|
1810
|
+
/**
* Check which agents have symlinks for a given skill.
*
* A path counts as "linked" when lstat succeeds and reports a symbolic link;
* missing paths and lstat failures are silently treated as not linked.
*
* @param skillName - Name of the skill (symlink name)
* @param agents - Agent names to check
* @param projectRoot - Project root directory
* @param agentConfigs - Custom agent configurations
* @returns Array of agent names that have valid symlinks
*/
async function getLinkedAgents(skillName, agents, projectRoot, agentConfigs) {
	const linked = [];
	for (const agentName of agents) {
		const agentConfig = resolveAgentConfig(agentName, agentConfigs);
		if (!agentConfig) continue;
		const linkPath = join(projectRoot, agentConfig.skillsDir, skillName);
		try {
			const stats = await lstat(linkPath);
			if (stats.isSymbolicLink()) linked.push(agentName);
		} catch {}
	}
	return linked;
}
|
|
1831
|
+
//#endregion
|
|
1832
|
+
export { isNewerVersion as $, getDependencies as A, resolveAgentConfig as B, downloadGitHubPackage as C, addGitHubDependency as D, addDependency as E, removeGitHubDependency as F, isGitHubSpecifier as G, generateRegistryIdentifier as H, writeManifest as I, parseGitHubShorthand as J, isGitHubUrl as K, getAvailableAgents as L, getManifestPath as M, readManifest as N, addLocalDependency as O, removeDependency as P, resolveRecursive as Q, parseAgentArg as R, GitHubRateLimitError as S, getGitHubDisplayName as T, getGitHubSkillName as U, formatGitHubSpecifier as V, isGitHubShorthand as W, parseGitHubUrl as X, parseGitHubSpecifier as Y, parseRegistrySpecifier as Z, readLockfile as _, getRegistrySkillPath as a, validateManifest as at, GitHubNotFoundError as b, addGitHubToLockfile as c, getExcludeArgsForRsync as ct, addToLockfileWithDeps as d, encryptBuffer as dt, resolveVersion as et, addWellKnownToLockfile as f, migrateLockfileIfNeeded as g, listLockfileWellKnownPackages as h, getLocalSkillPath as i, PSPM_SCHEMA_URL as it, getGitHubDependencies as j, addWellKnownDependency as k, addLocalToLockfile as l, loadIgnorePatterns as lt, listLockfileSkills as m, getGitHubSkillPath as n, printResolutionErrors as nt, getWellKnownSkillPath as o, calculateIntegrity as ot, listLockfileGitHubPackages as p, isRegistrySpecifier as q, getLinkedAgents as r, DEFAULT_SKILL_FILES as rt, removeAgentSymlinks as s, ALWAYS_IGNORED as st, createAgentSymlinks as t, computeInstallOrder as tt, addToLockfile as u, decryptBuffer as ut, removeFromLockfile as v, extractGitHubPackage as w, GitHubPathNotFoundError as x, removeGitHubFromLockfile as y, promptForAgents as z };
|
|
1833
|
+
|
|
1834
|
+
//# sourceMappingURL=symlinks-BTw8X0GG.js.map
|