agentsys 5.9.1 → 5.10.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,7 +1,7 @@
  {
  "name": "agentsys",
  "description": "20 specialized plugins for AI workflow automation - task orchestration, PR workflow, slop detection, code review, drift detection, enhancement analysis, documentation sync, unified static analysis, perf investigations, topic research, agent config linting, cross-tool AI consultation, structured AI debate, workflow pattern learning, codebase onboarding, contributor guidance, and Zig language support",
- "version": "5.9.1",
+ "version": "5.10.0",
  "owner": {
  "name": "Avi Fenesh",
  "url": "https://github.com/avifenesh"
@@ -26,7 +26,9 @@
  "name": "next-task",
  "source": {
  "source": "url",
- "url": "https://github.com/agent-sh/next-task.git"
+ "url": "https://github.com/agent-sh/next-task.git",
+ "ref": "v1.1.1",
+ "commit": "9aa32b856d81ebeeb6c6ea0ab421c80d6986a7b1"
  },
  "description": "Master workflow orchestrator: autonomous workflow with model optimization (opus/sonnet/haiku), two-file state management, workflow enforcement gates, 8 specialist agents",
  "version": "1.1.1",
@@ -37,7 +39,8 @@
  "name": "prepare-delivery",
  "source": {
  "source": "url",
- "url": "https://github.com/agent-sh/prepare-delivery.git"
+ "url": "https://github.com/agent-sh/prepare-delivery.git",
+ "commit": "2e8f400115d8df68e6e8e02466a97117c7f86fab"
  },
  "description": "Pre-ship quality gates: deslop, simplify, agnix, enhance, review loop, delivery validation, docs sync",
  "version": "0.1.0",
@@ -48,7 +51,8 @@
  "name": "gate-and-ship",
  "source": {
  "source": "url",
- "url": "https://github.com/agent-sh/gate-and-ship.git"
+ "url": "https://github.com/agent-sh/gate-and-ship.git",
+ "commit": "bfff7063fde89a05ce480e37019bb6e00ba55984"
  },
  "description": "Quality gates then ship - chains /prepare-delivery then /ship in one command",
  "version": "0.1.0",
@@ -59,10 +63,12 @@
  "name": "ship",
  "source": {
  "source": "url",
- "url": "https://github.com/agent-sh/ship.git"
+ "url": "https://github.com/agent-sh/ship.git",
+ "commit": "3e65dcf5f6aa60e8e42baff14eef0c2209302751",
+ "ref": "v1.1.1"
  },
  "description": "Complete PR workflow: commit to production, skips review when called from next-task, removes task from registry on cleanup, automatic rollback",
- "version": "1.0.0",
+ "version": "1.1.1",
  "category": "deployment",
  "homepage": "https://github.com/agent-sh/ship"
  },
@@ -70,7 +76,8 @@
  "name": "deslop",
  "source": {
  "source": "url",
- "url": "https://github.com/agent-sh/deslop.git"
+ "url": "https://github.com/agent-sh/deslop.git",
+ "commit": "be3ac2396dcabad450e5097e4f22f9f4e166a143"
  },
  "description": "3-phase AI slop detection: regex patterns (HIGH), multi-pass analyzers (MEDIUM), CLI tools (LOW)",
  "version": "1.0.0",
@@ -81,7 +88,8 @@
  "name": "audit-project",
  "source": {
  "source": "url",
- "url": "https://github.com/agent-sh/audit-project.git"
+ "url": "https://github.com/agent-sh/audit-project.git",
+ "commit": "a080ebf74184ba3ad2de19100e2b3a818c8a194c"
  },
  "description": "Multi-agent iterative code review until zero issues remain",
  "version": "1.0.0",
@@ -92,7 +100,8 @@
  "name": "drift-detect",
  "source": {
  "source": "url",
- "url": "https://github.com/agent-sh/drift-detect.git"
+ "url": "https://github.com/agent-sh/drift-detect.git",
+ "commit": "880c2ce1f0d637a947229281aea9b9a40156b6a4"
  },
  "description": "Deep repository analysis to realign project plans with code reality - detects drift, gaps, and creates prioritized reconstruction plans",
  "version": "1.0.0",
@@ -103,7 +112,8 @@
  "name": "enhance",
  "source": {
  "source": "url",
- "url": "https://github.com/agent-sh/enhance.git"
+ "url": "https://github.com/agent-sh/enhance.git",
+ "commit": "081e6b1c90bbd5b7297a18c6d01e86693825c113"
  },
  "description": "Master enhancement orchestrator: parallel analyzer execution for plugins, agents, docs, CLAUDE.md, and prompts with unified reporting",
  "version": "1.0.0",
@@ -114,7 +124,8 @@
  "name": "sync-docs",
  "source": {
  "source": "url",
- "url": "https://github.com/agent-sh/sync-docs.git"
+ "url": "https://github.com/agent-sh/sync-docs.git",
+ "commit": "f8281a98f440577934b67dc9ef3f9da85d56f9de"
  },
  "description": "Standalone documentation sync: find outdated refs, update CHANGELOG, flag stale examples based on code changes",
  "version": "1.0.0",
@@ -125,7 +136,9 @@
  "name": "repo-intel",
  "source": {
  "source": "url",
- "url": "https://github.com/agent-sh/repo-intel.git"
+ "url": "https://github.com/agent-sh/repo-intel.git",
+ "ref": "v0.2.0",
+ "commit": "100dbff1969097af5e6a2ede7a2a79400aa0e60f"
  },
  "description": "Unified static analysis via agent-analyzer - git history, AST symbols, project metadata, and doc-code sync",
  "version": "0.2.0",
@@ -136,7 +149,8 @@
  "name": "perf",
  "source": {
  "source": "url",
- "url": "https://github.com/agent-sh/perf.git"
+ "url": "https://github.com/agent-sh/perf.git",
+ "commit": "cc988ec68863a18243297d216e2fcd37802f2296"
  },
  "description": "Rigorous performance investigation workflow with baselines, profiling, hypotheses, and evidence-backed decisions",
  "version": "1.0.0",
@@ -147,7 +161,8 @@
  "name": "learn",
  "source": {
  "source": "url",
- "url": "https://github.com/agent-sh/learn.git"
+ "url": "https://github.com/agent-sh/learn.git",
+ "commit": "91983c1fe35b96bcda5360f4155465d69a0b01fc"
  },
  "description": "Research topics online and create comprehensive learning guides with RAG-optimized indexes",
  "version": "1.0.0",
@@ -158,10 +173,12 @@
  "name": "agnix",
  "source": {
  "source": "url",
- "url": "https://github.com/agent-sh/agnix.git"
+ "url": "https://github.com/agent-sh/agnix.git",
+ "commit": "e0d9ce8106830f7db9cadb5359259536f6239e2f",
+ "ref": "v0.22.1"
  },
  "description": "Lint agent configuration files (SKILL.md, CLAUDE.md, hooks, MCP) against 414 rules across 10+ AI tools",
- "version": "1.1.0",
+ "version": "0.22.1",
  "category": "development",
  "homepage": "https://github.com/agent-sh/agnix"
  },
@@ -169,7 +186,8 @@
  "name": "consult",
  "source": {
  "source": "url",
- "url": "https://github.com/agent-sh/consult.git"
+ "url": "https://github.com/agent-sh/consult.git",
+ "commit": "71a08ef5566cfb189d5161c2d0e31542d2c99155"
  },
  "description": "Cross-tool AI consultation: get second opinions from Gemini CLI, Codex CLI, Claude Code, OpenCode, or Copilot CLI with model and thinking effort control",
  "version": "1.0.0",
@@ -180,7 +198,8 @@
  "name": "debate",
  "source": {
  "source": "url",
- "url": "https://github.com/agent-sh/debate.git"
+ "url": "https://github.com/agent-sh/debate.git",
+ "commit": "95c0333b432aa8b0911e9accccecb219d3bc3ad7"
  },
  "description": "Structured multi-round debate between AI tools with proposer/challenger roles and verdict",
  "version": "1.0.0",
@@ -191,10 +210,12 @@
  "name": "web-ctl",
  "source": {
  "source": "url",
- "url": "https://github.com/agent-sh/web-ctl.git"
+ "url": "https://github.com/agent-sh/web-ctl.git",
+ "commit": "345e44bc8a7b373728afce6c0d94ef067b5abc82",
+ "ref": "v1.1.0"
  },
  "description": "Browser automation and web testing toolkit for AI agents - headless browser control, persistent sessions, auth handoff, and prompt injection defense",
- "version": "1.0.0",
+ "version": "1.1.0",
  "category": "automation",
  "homepage": "https://github.com/agent-sh/web-ctl"
  },
@@ -202,7 +223,8 @@
  "name": "skillers",
  "source": {
  "source": "url",
- "url": "https://github.com/agent-sh/skillers.git"
+ "url": "https://github.com/agent-sh/skillers.git",
+ "commit": "88efb0346b2582a224f3f85db72450b9f3ba7507"
  },
  "description": "Learn from workflow patterns across sessions and suggest skills, hooks, and agents to automate repetitive work",
  "version": "1.0.0",
@@ -213,7 +235,9 @@
  "name": "onboard",
  "source": {
  "source": "url",
- "url": "https://github.com/agent-sh/onboard.git"
+ "url": "https://github.com/agent-sh/onboard.git",
+ "ref": "v0.1.0",
+ "commit": "7444d6475055897498a348639dd0bcb12ba7906b"
  },
  "description": "Codebase onboarding - automated data collection and interactive project orientation",
  "version": "0.1.0",
@@ -224,7 +248,9 @@
  "name": "can-i-help",
  "source": {
  "source": "url",
- "url": "https://github.com/agent-sh/can-i-help.git"
+ "url": "https://github.com/agent-sh/can-i-help.git",
+ "ref": "v0.1.0",
+ "commit": "5610a54ce9200577879a0ad8a9dc174133f56abf"
  },
  "description": "Find where to contribute to any project - matches developer skills to test gaps, stale docs, bugspots, and open issues",
  "version": "0.1.0",
@@ -235,7 +261,8 @@
  "name": "zig-lsp",
  "source": {
  "source": "url",
- "url": "https://github.com/agent-sh/zig-lsp.git"
+ "url": "https://github.com/agent-sh/zig-lsp.git",
+ "commit": "ecf32677d7bb3cc8e28112bb3c70dd7809a3aea7"
  },
  "description": "Zig language server for Claude Code via ZLS - automatic diagnostics after every edit, jump-to-definition, find-references, and hover. Requires zls in PATH",
  "version": "0.1.0",
@@ -1,6 +1,6 @@
  {
  "name": "agentsys",
- "version": "5.9.1",
+ "version": "5.10.0",
  "description": "Professional-grade slash commands for Claude Code with cross-platform support",
  "keywords": [
  "workflow",
package/CHANGELOG.md CHANGED
@@ -9,6 +9,23 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0

  ## [Unreleased]

+ ## [5.10.0] - 2026-04-26
+
+ ### Security
+ - **Marketplace supply-chain hardening** (#347) - pin every `source: "url"` sub-plugin entry in `.claude-plugin/marketplace.json` to an immutable commit SHA (plus release tag when one exists) instead of tracking default branches. Unpinned `source: "url"` entries previously let `claude plugin install` follow the remote's default branch, meaning any sub-plugin compromise would ship code to every user on their next install. New `scripts/pin-marketplace.js` resolves `v<version>` tags to commit SHAs via `gh api repos/.../git/ref/tags/<tag>` (annotated tags are dereferenced to the underlying commit), rejects ambiguous array responses, and falls back to default-branch HEAD SHA when the desired tag does not yet exist. Covered by `__tests__/pin-marketplace.test.js`.
+ - **Reusable CI workflow SHA-pinned** (#347) - `agent-sh/.github/.github/workflows/agnix.yml@main` pinned to an explicit commit SHA so a compromise of the shared workflows repo cannot silently change agentsys's CI behavior.
+ - **Release workflow shell injection hardening** (#347) - replaced 5 shell blocks that interpolated `${{ inputs.version }}` / `${{ github.event.inputs.* }}` directly into bash with `env:` block wiring; values are now read as shell variables so a malicious tag/input cannot break out of the command string.
+ - **Removed self-referential npm dependency** (#347) - the `"agentsys": "^5.0.0"` entry in `package.json` / `package-lock.json` had no functional purpose and could confuse resolvers.
+ - **`agent-analyzer` binary downloader security** (#350, synced from agent-core) - `lib/binary/index.js` now requires a matching `.sha256` sidecar, computes and verifies SHA-256 before extraction (with an explicit `skipChecksum` escape hatch for local dev), and extracts into an isolated scratch directory with archive-path-traversal defenses: reject absolute paths, UNC paths, drive letters, `..` segments, and symlinks; copy only the expected binary into the final install location; scrub the scratch tree afterward. Windows extraction moved from `Expand-Archive` command strings to a `-File` PowerShell script with env-var argument passing so paths containing spaces are handled safely. Covered by `lib/binary/index.test.js`.
+
+ ### Changed
+ - **Marketplace pins upgraded to release tags** - post-#347, re-ran `scripts/pin-marketplace.js` after the downstream plugins cut tagged releases:
+ - `agnix`: default-branch SHA -> `v0.22.1` (tag + commit)
+ - `web-ctl`: default-branch SHA -> `v1.1.0` (tag + commit)
+ - `ship`: default-branch SHA -> `v1.1.1` (tag + commit)
+ - Running totals: 7 plugins pinned to tag + SHA, 13 still on default-branch SHA pending their first release tag.
+ - Bumped `version` fields in `marketplace.json` for `agnix`, `web-ctl`, and `ship` to match the latest published tags so future `pin-marketplace.js` runs resolve to the correct refs.
+
  ## [5.9.1] - 2026-04-26

  ### Changed
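
For orientation, the pin shape described in the Security notes above is visible in the marketplace.json hunks earlier in this diff: each `source: "url"` entry now carries a `commit`, plus a `ref` when a release tag exists. A minimal sketch of auditing those pins from Node follows; the file path and the script itself are illustrative, not part of the release, and the data shape (`plugins[].source.{source,url,ref,commit}`) is taken from the diff above.

```js
// Sketch only: list which marketplace entries are pinned to a 40-char commit SHA.
const fs = require('fs');

const marketplace = JSON.parse(
  fs.readFileSync('.claude-plugin/marketplace.json', 'utf8')
);

for (const plugin of marketplace.plugins) {
  const src = plugin.source || {};
  if (src.source !== 'url') continue; // only remote sub-plugins get pinned
  const pinned = /^[0-9a-f]{40}$/i.test(src.commit || '');
  console.log(
    plugin.name + ': ' +
    (pinned ? 'pinned @ ' + src.commit.slice(0, 10) : 'UNPINNED') +
    (src.ref ? ' (tag ' + src.ref + ')' : '')
  );
}
```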
package/bin/cli.js CHANGED
@@ -342,6 +342,15 @@ function resolvePluginDeps(names, marketplace) {
  * @param {string} version - Expected version string
  * @returns {Promise<string>} Path to extracted plugin directory
  */
+ // TODO(agentsys-security): this local dev installer currently honors only
+ // `source.url` + `plugin.version` and ignores `source.ref` / `source.commit`
+ // from marketplace.json. Claude Code's plugin installer (the primary install
+ // path for end users) DOES honor `ref` and `commit` per the marketplace
+ // schema, so the pins added by scripts/pin-marketplace.js are authoritative
+ // for real users. This dev CLI should be updated to prefer `source.commit`
+ // (then `source.ref`, then `plugin.version`) when resolving the fetch ref,
+ // so local dev gets the same supply-chain guarantees as production installs.
+ // Tracked as a follow-up; not fixed in PR #347 to keep that PR scoped.
  async function fetchPlugin(name, source, version) {
  const cacheDir = getPluginCacheDir();
  const pluginDir = path.join(cacheDir, name);
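
The TODO above spells out the intended resolution order for the dev CLI. A hedged sketch of that preference, assuming nothing beyond what the comment states; the helper name is hypothetical and `fetchPlugin` does not implement this yet.

```js
// Hypothetical helper, not in the package: pick the ref to fetch in the order
// the TODO describes - immutable commit pin first, then release tag, then the
// declared version as a "v"-prefixed tag.
function resolveFetchRef(source, version) {
  if (source && source.commit) return source.commit; // strongest guarantee
  if (source && source.ref) return source.ref;       // human-readable tag
  return version ? 'v' + version : 'HEAD';           // last resort
}

// resolveFetchRef({ ref: 'v1.1.1', commit: '9aa32b85...' }, '1.1.1') -> '9aa32b85...'
```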
@@ -6,6 +6,19 @@
  * Handles lazy downloading and execution. Since Claude Code plugins have no
  * postinstall hooks, the binary is downloaded at runtime on first use.
  *
+ * Security hardening (2026-04-26 audit):
+ * - Every release asset is verified against its `<asset>.sha256` sidecar
+ * before extraction. A mismatch aborts with a clear message.
+ * - Archives are extracted into an isolated tmpdir. Each entry path is
+ * validated to reject absolute paths, Windows drive letters, and parent
+ * traversal (`..`). The expected binary is then moved to the final
+ * destination; everything else is discarded.
+ * - The Windows zip path runs a PowerShell helper script via `-File` and
+ * passes paths through environment variables, so PowerShell never
+ * re-parses a command string (which broke on spaces/brackets). The
+ * script validates every zip entry before extracting it and rejects
+ * absolute, UNC, and parent-traversal entries.
+ *
  * @module lib/binary
  */

@@ -14,6 +27,7 @@ const path = require('path');
  const os = require('os');
  const https = require('https');
  const cp = require('child_process');
+ const crypto = require('crypto');
  const { promisify } = require('util');

  const execFileAsync = promisify(cp.execFile);
@@ -120,7 +134,7 @@ async function isAvailableAsync() {
  }

  // ---------------------------------------------------------------------------
- // Download
+ // Download + checksum verification
  // ---------------------------------------------------------------------------

  /**
@@ -180,67 +194,400 @@ function downloadToBuffer(url) {
  }

  /**
- * Extract a tar.gz buffer into a directory using the system tar command.
+ * Parse the leading 64-hex digest from a `.sha256` sidecar body.
+ * Tolerant of these formats (GNU coreutils + BSD `shasum`):
+ * "<64-hex>\n"
+ * "<64-hex> <filename>\n" (text mode, two-space separator)
+ * "<64-hex> *<filename>\n" (binary mode, leading asterisk)
+ * "<64-hex> <filename>\n" (single-space variants)
+ * Throws if no valid digest is found.
+ * @param {string} body
+ * @returns {string} lower-cased 64-char hex digest
+ */
+ function parseSha256Sidecar(body) {
+ if (typeof body !== 'string') body = String(body || '');
+ const match = body.trim().match(/^([A-Fa-f0-9]{64})\b/);
+ if (!match) {
+ throw new Error('Could not parse SHA-256 digest from sidecar body');
+ }
+ return match[1].toLowerCase();
+ }
+
+ /**
+ * Fetch and parse a `.sha256` sidecar next to an asset URL.
+ * @param {string} assetUrl full URL of the archive (not the sidecar)
+ * @returns {Promise<string>} lower-cased hex digest
+ */
+ async function downloadSha256(assetUrl) {
+ const sidecarUrl = assetUrl + '.sha256';
+ const buf = await downloadToBuffer(sidecarUrl);
+ return parseSha256Sidecar(buf.toString('utf8'));
+ }
+
+ /**
+ * Compute the lower-case hex SHA-256 of a Buffer.
+ * @param {Buffer} buf
+ * @returns {string}
+ */
+ function sha256Hex(buf) {
+ return crypto.createHash('sha256').update(buf).digest('hex');
+ }
+
+ /**
+ * Verify a downloaded buffer against an expected hex digest.
+ * Throws with a security-focused message on mismatch.
  * @param {Buffer} buf
- * @param {string} destDir
- * @returns {Promise<void>}
+ * @param {string} expectedHex
+ * @param {string} filename user-facing name for the error message
  */
- function extractTarGz(buf, destDir) {
+ function verifySha256(buf, expectedHex, filename) {
+ const expected = String(expectedHex || '').toLowerCase();
+ const actual = sha256Hex(buf);
+ if (expected !== actual) {
+ throw new Error(
+ 'SHA-256 verification failed for ' + filename + ': ' +
+ 'expected ' + expected + ', got ' + actual + '. ' +
+ 'This could indicate a tampered release. Do not extract.'
+ );
+ }
+ }
+
+ // ---------------------------------------------------------------------------
+ // Archive entry validation
+ // ---------------------------------------------------------------------------
+
+ /**
+ * Reject archive entries with paths that could escape the extract directory.
+ * Rules:
+ * - No absolute POSIX paths (leading `/`)
+ * - No Windows absolute paths (drive letter like `C:\` or `C:/`)
+ * - No UNC paths (`\\server\share`)
+ * - No `..` as a path component
+ * - No empty entry names
+ * @param {string} entry
+ * @throws {Error} on unsafe entry
+ */
+ function assertSafeArchiveEntry(entry) {
+ if (!entry || typeof entry !== 'string') {
+ throw new Error('Refusing to extract archive with empty entry name');
+ }
+ const name = entry.replace(/\\/g, '/').trim();
+ if (name.length === 0) {
+ throw new Error('Refusing to extract archive with empty entry name');
+ }
+ if (name.startsWith('//')) {
+ throw new Error('Refusing to extract archive with UNC entry: ' + entry);
+ }
+ if (name.startsWith('/')) {
+ throw new Error('Refusing to extract archive with absolute entry: ' + entry);
+ }
+ if (/^[A-Za-z]:[\\/]/.test(entry)) {
+ throw new Error('Refusing to extract archive with Windows absolute entry: ' + entry);
+ }
+ const parts = name.split('/').filter(function(p) { return p.length > 0; });
+ for (let i = 0; i < parts.length; i++) {
+ if (parts[i] === '..') {
+ throw new Error('Refusing to extract archive with parent-traversal entry: ' + entry);
+ }
+ }
+ }
+
+ /**
+ * List the entries inside a tar.gz buffer by running `tar -tz` over stdin.
+ * Returns the raw list; caller is responsible for validating each entry.
+ * @param {Buffer} buf
+ * @returns {Promise<string[]>}
+ */
+ function listTarGzEntries(buf) {
  return new Promise(function(resolve, reject) {
- const tarDest = process.platform === 'win32' ? destDir.replace(/\\/g, '/') : destDir;
- const tar = cp.spawn('tar', ['xz', '-C', tarDest], {
- stdio: ['pipe', 'pipe', 'pipe']
- });
+ const tar = cp.spawn('tar', ['-tz'], { stdio: ['pipe', 'pipe', 'pipe'] });
+ let stdout = '';
  let stderr = '';
+ tar.stdout.on('data', function(d) { stdout += d; });
  tar.stderr.on('data', function(d) { stderr += d; });
- tar.stdin.write(buf);
- tar.stdin.end();
+ tar.on('error', reject);
  tar.on('close', function(code) {
  if (code !== 0) {
- reject(new Error('tar extraction failed (code ' + code + '): ' + stderr));
- } else {
- resolve();
+ reject(new Error('tar -tz listing failed (code ' + code + '): ' + stderr));
+ return;
  }
+ const entries = stdout.split(/\r?\n/).filter(function(l) { return l.length > 0; });
+ resolve(entries);
  });
- tar.on('error', reject);
+ tar.stdin.write(buf);
+ tar.stdin.end();
  });
  }

  /**
- * Extract a zip buffer into a directory using PowerShell Expand-Archive (Windows).
+ * Verify that a path resolved from extraction lies inside a known root.
+ * Guards against symlinks and any surprise introduced by the OS extractor.
+ * @param {string} root
+ * @param {string} candidate
+ */
+ function assertInsideRoot(root, candidate) {
+ const rootResolved = path.resolve(root) + path.sep;
+ const candResolved = path.resolve(candidate);
+ if (candResolved !== path.resolve(root) && !candResolved.startsWith(rootResolved)) {
+ throw new Error('Extracted path escapes extract root: ' + candidate);
+ }
+ }
+
+ /**
+ * Recursively walk a directory and return all file paths (not dirs).
+ * Throws if any symlink is encountered (defense in depth: no surprise escapes).
+ * @param {string} dir
+ * @returns {string[]}
+ */
+ function walkFiles(dir) {
+ const out = [];
+ const stack = [dir];
+ while (stack.length > 0) {
+ const cur = stack.pop();
+ const st = fs.lstatSync(cur);
+ if (st.isSymbolicLink()) {
+ throw new Error('Refusing to follow symlink produced by extractor: ' + cur);
+ }
+ if (st.isDirectory()) {
+ const names = fs.readdirSync(cur);
+ for (let i = 0; i < names.length; i++) {
+ stack.push(path.join(cur, names[i]));
+ }
+ } else if (st.isFile()) {
+ out.push(cur);
+ }
+ }
+ return out;
+ }
+
+ /**
+ * Remove a directory tree, tolerating already-missing paths.
+ * @param {string} dir
+ */
+ function rmrf(dir) {
+ try {
+ fs.rmSync(dir, { recursive: true, force: true });
+ } catch (e) {
+ /* ignore */
+ }
+ }
+
+ // ---------------------------------------------------------------------------
+ // Extraction
+ // ---------------------------------------------------------------------------
+
+ /**
+ * Extract a tar.gz buffer into a scratch directory, validating entries first.
+ * Returns the scratch directory; caller is responsible for moving files out
+ * and calling rmrf() on it.
  * @param {Buffer} buf
- * @param {string} destDir
- * @param {string} binaryName
- * @returns {Promise<void>}
+ * @returns {Promise<string>} scratch dir
  */
- function extractZip(buf, destDir, binaryName) {
- return new Promise(function(resolve, reject) {
- const tmpZip = path.join(os.tmpdir(), binaryName + '-' + Date.now() + '.zip');
- fs.writeFileSync(tmpZip, buf);
- const cmd = 'Expand-Archive -Path \'' + tmpZip + '\' -DestinationPath \'' + destDir + '\' -Force';
- const ps = cp.spawn('powershell', ['-NoProfile', '-NonInteractive', '-Command', cmd], {
- stdio: ['ignore', 'pipe', 'pipe']
+ async function extractTarGzToScratch(buf) {
+ const entries = await listTarGzEntries(buf);
+ for (let i = 0; i < entries.length; i++) {
+ assertSafeArchiveEntry(entries[i]);
+ }
+
+ const scratch = fs.mkdtempSync(path.join(os.tmpdir(), 'agent-analyzer-tar-'));
+
+ try {
+ await new Promise(function(resolve, reject) {
+ const tar = cp.spawn('tar', ['xz', '-C', scratch], { stdio: ['pipe', 'pipe', 'pipe'] });
+ let stderr = '';
+ tar.stderr.on('data', function(d) { stderr += d; });
+ tar.on('error', reject);
+ tar.on('close', function(code) {
+ if (code !== 0) {
+ reject(new Error('tar extraction failed (code ' + code + '): ' + stderr));
+ } else {
+ resolve();
+ }
+ });
+ tar.stdin.write(buf);
+ tar.stdin.end();
  });
- let stderr = '';
- ps.stderr.on('data', function(d) { stderr += d; });
- ps.on('close', function(code) {
- try { fs.unlinkSync(tmpZip); } catch (e) { /* ignore */ }
- if (code !== 0) {
- reject(new Error('zip extraction failed (code ' + code + '): ' + stderr));
- } else {
- resolve();
- }
+
+ // Defense in depth: reject any symlink or non-regular entry the OS
+ // extractor may have created, and confirm every file resolves inside
+ // scratch.
+ const files = walkFiles(scratch);
+ for (let i = 0; i < files.length; i++) {
+ assertInsideRoot(scratch, files[i]);
+ }
+ } catch (err) {
+ rmrf(scratch);
+ throw err;
+ }
+
+ return scratch;
+ }
+
+ /**
+ * PowerShell script body that validates and extracts a zip entry-by-entry
+ * using .NET's System.IO.Compression.ZipFile. Paths and output dir are read
+ * from environment variables (`SRC_ZIP`, `DEST_DIR`) so no argument parsing
+ * can split on spaces, wildcards, or quotes.
+ *
+ * Rejects:
+ * - Absolute entry names (POSIX `/`, Windows `C:\`)
+ * - UNC entry names (`\\server\share`)
+ * - Any `..` path component
+ * - Resolved paths that escape the destination directory
+ *
+ * On any validation failure the script writes to stderr and exits with a
+ * non-zero status; nothing is extracted.
+ */
+ const EXTRACT_ZIP_PS1 = [
+ '$ErrorActionPreference = "Stop"',
+ '$src = $env:SRC_ZIP',
+ '$dest = $env:DEST_DIR',
+ 'if ([string]::IsNullOrEmpty($src) -or [string]::IsNullOrEmpty($dest)) {',
+ ' [Console]::Error.WriteLine("SRC_ZIP and DEST_DIR must both be set"); exit 2',
+ '}',
+ 'Add-Type -AssemblyName System.IO.Compression.FileSystem',
+ '$destFull = [System.IO.Path]::GetFullPath($dest)',
+ 'if (-not $destFull.EndsWith([System.IO.Path]::DirectorySeparatorChar)) {',
+ ' $destFull = $destFull + [System.IO.Path]::DirectorySeparatorChar',
+ '}',
+ '$zip = [System.IO.Compression.ZipFile]::OpenRead($src)',
+ 'try {',
+ ' foreach ($entry in $zip.Entries) {',
+ ' $name = $entry.FullName',
+ ' if ([string]::IsNullOrEmpty($name)) { continue }',
+ ' $norm = $name -replace "\\\\","/"',
+ ' if ($norm.StartsWith("/") -or $norm.StartsWith("//")) {',
+ ' [Console]::Error.WriteLine("Refusing absolute/UNC entry: " + $name); exit 3',
+ ' }',
+ ' if ($name -match "^[A-Za-z]:[\\\\/]") {',
+ ' [Console]::Error.WriteLine("Refusing Windows-absolute entry: " + $name); exit 3',
+ ' }',
+ ' foreach ($part in ($norm -split "/")) {',
+ ' if ($part -eq "..") {',
+ ' [Console]::Error.WriteLine("Refusing parent-traversal entry: " + $name); exit 3',
+ ' }',
+ ' }',
+ ' $target = [System.IO.Path]::GetFullPath([System.IO.Path]::Combine($destFull, $norm))',
+ ' if (-not $target.StartsWith($destFull, [System.StringComparison]::OrdinalIgnoreCase)) {',
+ ' [Console]::Error.WriteLine("Entry escapes destination: " + $name); exit 3',
+ ' }',
+ ' if ($entry.FullName.EndsWith("/")) {',
+ ' [System.IO.Directory]::CreateDirectory($target) | Out-Null',
+ ' } else {',
+ ' $parent = [System.IO.Path]::GetDirectoryName($target)',
+ ' if ($parent) { [System.IO.Directory]::CreateDirectory($parent) | Out-Null }',
+ ' [System.IO.Compression.ZipFileExtensions]::ExtractToFile($entry, $target, $true)',
+ ' }',
+ ' }',
+ '} finally {',
+ ' $zip.Dispose()',
+ '}'
+ ].join('\r\n');
+
+ /**
+ * Extract a zip buffer into a scratch directory.
+ *
+ * The extraction runs a PowerShell helper script via `-File` so PowerShell
+ * never re-parses a command string (which would break on paths containing
+ * spaces or brackets). The script reads the zip and destination paths from
+ * environment variables, validates every entry's path before extracting, and
+ * writes files individually using .NET's ZipFile APIs.
+ *
+ * After extraction, `walkFiles` re-checks the tree and rejects any symlink
+ * or junction that might have been created.
+ *
+ * @param {Buffer} buf
+ * @returns {Promise<string>} scratch dir
+ */
+ async function extractZipToScratch(buf) {
+ const scratch = fs.mkdtempSync(path.join(os.tmpdir(), 'agent-analyzer-zip-'));
+ const tmpZip = path.join(scratch, '__archive.zip');
+ const scriptDir = fs.mkdtempSync(path.join(os.tmpdir(), 'agent-analyzer-ps-'));
+ const scriptPath = path.join(scriptDir, 'extract.ps1');
+
+ try {
+ fs.writeFileSync(tmpZip, buf);
+ fs.writeFileSync(scriptPath, EXTRACT_ZIP_PS1, 'utf8');
+
+ await new Promise(function(resolve, reject) {
+ const child = cp.execFile(
+ 'powershell.exe',
+ [
+ '-NoProfile',
+ '-NonInteractive',
+ '-ExecutionPolicy', 'Bypass',
+ '-File', scriptPath
+ ],
+ {
+ windowsHide: true,
+ env: Object.assign({}, process.env, {
+ SRC_ZIP: tmpZip,
+ DEST_DIR: scratch
+ })
+ },
+ function(err, _stdout, stderr) {
+ if (err) {
+ reject(new Error('zip extraction failed: ' + (stderr || err.message)));
+ } else {
+ resolve();
+ }
+ }
+ );
+ // Do not write to stdin; the script reads from env.
+ if (child.stdin) child.stdin.end();
  });
- ps.on('error', reject);
- });
+
+ try { fs.unlinkSync(tmpZip); } catch (e) { /* ignore */ }
+
+ // Defense in depth: walkFiles() throws on any symlink/junction. Also
+ // confirm every file resolves inside scratch.
+ const files = walkFiles(scratch);
+ for (let i = 0; i < files.length; i++) {
+ assertInsideRoot(scratch, files[i]);
+ }
+ } catch (err) {
+ rmrf(scratch);
+ throw err;
+ } finally {
+ rmrf(scriptDir);
+ }
+
+ return scratch;
+ }
+
+ /**
+ * Find the expected binary inside a scratch directory (recursive search).
+ * @param {string} scratch
+ * @param {string} binaryBaseName e.g. `agent-analyzer` or `agent-analyzer.exe`
+ * @returns {string|null} absolute path, or null if not found
+ */
+ function findBinaryInScratch(scratch, binaryBaseName) {
+ const files = walkFiles(scratch);
+ for (let i = 0; i < files.length; i++) {
+ if (path.basename(files[i]) === binaryBaseName) {
+ assertInsideRoot(scratch, files[i]);
+ return files[i];
+ }
+ }
+ return null;
  }

+ // ---------------------------------------------------------------------------
+ // Download + install
+ // ---------------------------------------------------------------------------
+
  /**
  * Download and install the binary for the current platform into ~/.agent-sh/bin/.
  * @param {string} ver
- * @returns {Promise<string>}
+ * @param {Object} [options]
+ * @param {boolean} [options.skipChecksum=false] LOCAL DEV ONLY. Skips the
+ * `.sha256` sidecar fetch and verification. NEVER set this in production.
+ * @returns {Promise<string>} path to the installed binary
  */
- async function downloadBinary(ver) {
+ async function downloadBinary(ver, options) {
+ const opts = options || {};
+ const skipChecksum = opts.skipChecksum === true;
+
  const platformKey = getPlatformKey();
  if (!platformKey) {
  throw new Error(
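
As a quick illustration of the entry validation added above (not taken from the package's test suite; the require path and entry names are assumptions for illustration), `assertSafeArchiveEntry` rejects classic zip-slip/tar-slip names and passes ordinary relative entries:

```js
// Illustration only: exercise the exported validator against hostile paths.
const { assertSafeArchiveEntry } = require('./lib/binary'); // path assumed

assertSafeArchiveEntry('agent-analyzer-x64/agent-analyzer'); // fine: relative, no traversal

const hostile = [
  '/etc/passwd',                  // absolute POSIX path
  'C:\\Windows\\System32\\x.dll', // Windows drive letter
  '..\\..\\autorun.inf',          // parent traversal via backslashes
  'bin/../../escape',             // parent traversal via forward slashes
  '\\\\server\\share\\x',         // UNC path
];
for (const entry of hostile) {
  try {
    assertSafeArchiveEntry(entry);
    console.error('unexpectedly accepted: ' + entry);
  } catch (err) {
    console.log('rejected: ' + entry + ' (' + err.message + ')');
  }
}
```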
@@ -250,12 +597,14 @@ async function downloadBinary(ver) {
  }

  const url = buildDownloadUrl(ver, platformKey);
- process.stderr.write('Downloading ' + BINARY_NAME + ' v' + ver + ' for ' + platformKey + '...' + '\n');
+ const filename = url.substring(url.lastIndexOf('/') + 1);
+ process.stderr.write('Downloading ' + BINARY_NAME + ' v' + ver + ' for ' + platformKey + '...\n');

  const binPath = getBinaryPath();
  const binDir = path.dirname(binPath);
  fs.mkdirSync(binDir, { recursive: true });

+ // --- 1. Fetch archive bytes --------------------------------------------
  let buf;
  try {
  buf = await downloadToBuffer(url);
@@ -271,10 +620,53 @@ async function downloadBinary(ver) {
  );
  }

- if (process.platform === 'win32') {
- await extractZip(buf, binDir, path.basename(binPath));
+ // --- 2. Verify SHA-256 sidecar -----------------------------------------
+ if (skipChecksum) {
+ process.stderr.write(
+ '[WARN] skipChecksum=true - SHA-256 verification disabled. ' +
+ 'This is LOCAL DEV ONLY and MUST NOT be used in production.\n'
+ );
  } else {
- await extractTarGz(buf, binDir);
+ let expected;
+ try {
+ expected = await downloadSha256(url);
+ } catch (err) {
+ throw new Error(
+ 'Failed to fetch SHA-256 sidecar for ' + filename + ':\n' +
+ ' URL: ' + url + '.sha256\n' +
+ ' Error: ' + err.message + '\n\n' +
+ 'The release may be missing its checksum file. Refusing to install ' +
+ 'an unverified binary. If this is a legacy release without sidecars, ' +
+ 'pass { skipChecksum: true } to downloadBinary() (LOCAL DEV ONLY).'
+ );
+ }
+ verifySha256(buf, expected, filename);
+ }
+
+ // --- 3. Extract to isolated scratch dir + validate entries -------------
+ const binaryBaseName = path.basename(binPath);
+ let scratch;
+ try {
+ if (process.platform === 'win32') {
+ scratch = await extractZipToScratch(buf);
+ } else {
+ scratch = await extractTarGzToScratch(buf);
+ }
+
+ // --- 4. Locate the expected binary inside scratch --------------------
+ const extractedBin = findBinaryInScratch(scratch, binaryBaseName);
+ if (!extractedBin) {
+ throw new Error(
+ 'Expected binary "' + binaryBaseName + '" not found inside archive ' +
+ filename + '. Archive layout may have changed.'
+ );
+ }
+
+ // --- 5. Move ONLY the expected binary to its final location ----------
+ // copyFileSync so cross-device moves work. scratch is rmrf'd in finally.
+ fs.copyFileSync(extractedBin, binPath);
+ } finally {
+ if (scratch) rmrf(scratch);
  }

  if (process.platform !== 'win32') {
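
Step 2 above only succeeds if each release asset ships with the sidecar the downloader expects. A sketch of producing one in the coreutils/`shasum` text format that `parseSha256Sidecar()` accepts follows; the asset filename is illustrative and this is not the project's actual release tooling.

```js
// Sketch: write "<64-hex>  <basename>\n" next to an asset so the hardened
// downloader can verify it before extraction.
const crypto = require('crypto');
const fs = require('fs');
const path = require('path');

function writeSha256Sidecar(assetPath) {
  const digest = crypto
    .createHash('sha256')
    .update(fs.readFileSync(assetPath))
    .digest('hex');
  fs.writeFileSync(assetPath + '.sha256', digest + '  ' + path.basename(assetPath) + '\n');
  return digest;
}

// writeSha256Sidecar('agent-analyzer-linux-x64.tar.gz'); // filename illustrative
```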
@@ -300,6 +692,7 @@ async function downloadBinary(ver) {
  * Ensure the binary exists and meets the minimum version. Downloads if needed.
  * @param {Object} [options]
  * @param {string} [options.version]
+ * @param {boolean} [options.skipChecksum=false] LOCAL DEV ONLY.
  * @returns {Promise<string>}
  */
  async function ensureBinary(options) {
@@ -314,7 +707,7 @@ async function ensureBinary(options) {
  }
  }

- return downloadBinary(targetVer);
+ return downloadBinary(targetVer, { skipChecksum: opts.skipChecksum === true });
  }

  /**
@@ -322,6 +715,7 @@ async function ensureBinary(options) {
  * Prefer ensureBinary() unless a sync API is strictly required.
  * @param {Object} [options]
  * @param {string} [options.version]
+ * @param {boolean} [options.skipChecksum=false] LOCAL DEV ONLY.
  * @returns {string}
  */
  function ensureBinarySync(options) {
@@ -335,10 +729,12 @@ function ensureBinarySync(options) {
  }

  const targetVer = (options && options.version) || ANALYZER_MIN_VERSION;
+ const skipChecksum = !!(options && options.skipChecksum);
  const selfPath = __filename;
  const helperLines = [
  'var b = require(' + JSON.stringify(selfPath) + ');',
- 'b.ensureBinary({ version: ' + JSON.stringify(targetVer) + ' })',
+ 'b.ensureBinary({ version: ' + JSON.stringify(targetVer) +
+ ', skipChecksum: ' + JSON.stringify(skipChecksum) + ' })',
  ' .then(function(p) { process.stdout.write(p); })',
  ' .catch(function(e) { process.stderr.write(e.message); process.exit(1); });'
  ];
@@ -394,5 +790,16 @@ module.exports = {
  isAvailableAsync,
  meetsMinimumVersion,
  buildDownloadUrl,
- PLATFORM_MAP
+ PLATFORM_MAP,
+ // Exported for tests + advanced consumers
+ parseSha256Sidecar,
+ verifySha256,
+ sha256Hex,
+ assertSafeArchiveEntry,
+ assertInsideRoot,
+ downloadBinary,
+ // Exported for tests only
+ extractTarGzToScratch,
+ extractZipToScratch,
+ _EXTRACT_ZIP_PS1: EXTRACT_ZIP_PS1
  };
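
A usage sketch of the hardened entry point exported above; the function and option names come from the diff, while the require path and surrounding flow are assumptions:

```js
const binary = require('./lib/binary'); // path assumed

(async () => {
  // Happy path: download if missing, verify the .sha256 sidecar, extract into
  // an isolated scratch dir, and copy only the expected binary into place.
  const binPath = await binary.ensureBinary();
  console.log('agent-analyzer ready at', binPath);

  // LOCAL DEV ONLY: legacy releases without sidecars can opt out of
  // verification, which the downloader loudly warns about on stderr.
  // await binary.ensureBinary({ skipChecksum: true });
})().catch((err) => {
  console.error(err.message);
  process.exit(1);
});
```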
@@ -303,17 +303,19 @@ function formatSection(title, content) {
  */

  /**
- * Truncate text to limit with ellipsis
+ * Truncate text to limit with ellipsis.
+ *
+ * Slices on Unicode code points (not UTF-16 code units) so multi-byte
+ * chars like emoji never end up as orphan surrogates. Non-positive
+ * maxLength returns the original string unchanged.
  *
  * @param {string} text - Text to truncate
- * @param {number} maxLength - Maximum length
+ * @param {number} maxLength - Maximum length (in code points)
  * @returns {string} Truncated text
  */
  function truncate(text, maxLength) {
- // Negative or zero maxLength: return original text unchanged
  if (maxLength <= 0) return text;
- // Use Array.from to iterate over code points (handles emoji/surrogate pairs)
- const codePoints = Array.from(text);
+ const codePoints = [...text];
  if (codePoints.length <= maxLength) return text;
  return codePoints.slice(0, maxLength - 3).join('') + '...';
  }
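
The code-point slice above matters for strings containing surrogate pairs: a plain `String.prototype.slice` can cut an emoji in half. A quick standalone check, with the `truncate` body copied from the diff:

```js
// Standalone check of code-point vs UTF-16-unit truncation.
function truncate(text, maxLength) {
  if (maxLength <= 0) return text;
  const codePoints = [...text];
  if (codePoints.length <= maxLength) return text;
  return codePoints.slice(0, maxLength - 3).join('') + '...';
}

console.log(truncate('hello 👍👍👍 world', 10)); // "hello 👍..." - ellipsis lands on a whole code point
console.log('hello 👍👍👍 world'.slice(0, 7));   // ends in a lone high surrogate (U+D83D)
```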
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "agentsys",
- "version": "5.9.1",
+ "version": "5.10.0",
  "description": "A modular runtime and orchestration system for AI agents - works with Claude Code, OpenCode, and Codex CLI",
  "main": "lib/platform/detect-platform.js",
  "type": "commonjs",
@@ -81,7 +81,6 @@
  "node": ">=18.0.0"
  },
  "dependencies": {
- "agentsys": "^5.0.0",
  "js-yaml": "~4.1.1"
  },
  "devDependencies": {
@@ -0,0 +1,224 @@
+ #!/usr/bin/env node
+ /**
+ * Pin each marketplace sub-plugin entry to a release tag (and commit SHA for
+ * defense in depth). Falls back to pinning current default-branch HEAD when a
+ * release tag for the declared `version` does not exist on the remote.
+ *
+ * Rationale: unpinned `source: "url"` entries let `claude plugin install`
+ * track the default branch, which is a supply-chain compromise vector. Pinning
+ * to a tag (for humans) AND the tag's resolved commit SHA (for integrity)
+ * ensures the exact bytes we ship are the exact bytes users get.
+ *
+ * Usage: node scripts/pin-marketplace.js [--dry-run]
+ *
+ * Requires: `gh` CLI authenticated against the agent-sh org.
+ */
+
+ 'use strict';
+
+ const fs = require('fs');
+ const path = require('path');
+ const { execFileSync } = require('child_process');
+
+ const DRY_RUN = process.argv.includes('--dry-run');
+ const MARKETPLACE_PATH = path.join(
+ __dirname,
+ '..',
+ '.claude-plugin',
+ 'marketplace.json',
+ );
+
+ // Seam for tests: callers may override `ghRunner` to stub API responses.
+ let ghRunner = defaultGhRunner;
+
+ function defaultGhRunner(args) {
+ try {
+ return execFileSync('gh', args, {
+ encoding: 'utf8',
+ stdio: ['ignore', 'pipe', 'pipe'],
+ }).trim();
+ } catch (err) {
+ const stderr = err.stderr ? err.stderr.toString() : '';
+ const e = new Error(`gh ${args.join(' ')} failed: ${stderr || err.message}`);
+ e.stderr = stderr;
+ throw e;
+ }
+ }
+
+ function gh(args) {
+ return ghRunner(args);
+ }
+
+ function setGhRunner(fn) {
+ ghRunner = typeof fn === 'function' ? fn : defaultGhRunner;
+ }
+
+ function parseOrgRepo(gitUrl) {
+ // https://github.com/agent-sh/<name>.git -> ["agent-sh", "<name>"]
+ // https://github.com/agent-sh/<name> -> ["agent-sh", "<name>"]
+ // https://github.com/agent-sh/<name>/ -> ["agent-sh", "<name>"]
+ // https://github.com/agent-sh/<name>/.git -> ["agent-sh", "<name>"]
+ // git@github.com:agent-sh/<name>.git -> ["agent-sh", "<name>"]
+ const m = gitUrl.match(
+ /github\.com[:/]+([^/]+)\/([^/]+?)\/?(?:\.git)?\/?$/,
+ );
+ if (!m) throw new Error(`Cannot parse org/repo from ${gitUrl}`);
+ return { owner: m[1], repo: m[2] };
+ }
+
+ function resolveTagSha(owner, repo, tag) {
+ // Returns the commit SHA the tag resolves to, or null if tag is missing.
+ // Uses the singular `git/ref/tags/<tag>` endpoint to get an exact match;
+ // the plural `git/refs/tags/<tag>` does prefix matching and can silently
+ // return an array when multiple tags share a prefix.
+ // Tags may be annotated (object.type === "tag") or lightweight. Annotated
+ // tags need a second deref step to the underlying commit.
+ let ref;
+ try {
+ ref = JSON.parse(
+ gh(['api', `repos/${owner}/${repo}/git/ref/tags/${tag}`]),
+ );
+ } catch (err) {
+ if (/Not Found|404/i.test(err.stderr || err.message)) return null;
+ throw err;
+ }
+ // Defense in depth: reject unexpected array responses as ambiguous so a
+ // future endpoint-behavior shift cannot silently pick the wrong tag.
+ if (Array.isArray(ref)) {
+ throw new Error(
+ `Ambiguous tag lookup for ${owner}/${repo}@${tag}: got array of ${ref.length} refs`,
+ );
+ }
+ if (!ref || !ref.object) return null;
+ if (ref.object.type === 'commit') return ref.object.sha;
+ if (ref.object.type === 'tag') {
+ const annotated = JSON.parse(
+ gh(['api', `repos/${owner}/${repo}/git/tags/${ref.object.sha}`]),
+ );
+ return annotated.object && annotated.object.sha
+ ? annotated.object.sha
+ : null;
+ }
+ return null;
+ }
+
+ function defaultBranchHeadSha(owner, repo) {
+ // Use HEAD (which the API resolves to the repo's default branch) rather
+ // than hardcoding `main`. Works even if the repo still ships `master` or
+ // adopts something else later.
+ return gh([
+ 'api',
+ `repos/${owner}/${repo}/commits/HEAD`,
+ '--jq',
+ '.sha',
+ ]);
+ }
+
+ function pinPlugin(plugin) {
+ const src = plugin.source;
+ if (!src || src.source !== 'url' || !src.url) {
+ return { status: 'skipped', name: plugin.name };
+ }
+
+ const { owner, repo } = parseOrgRepo(src.url);
+ const version = plugin.version;
+ const tag = version ? `v${version}` : null;
+
+ let sha = null;
+ if (tag) {
+ sha = resolveTagSha(owner, repo, tag);
+ }
+
+ if (sha) {
+ src.ref = tag;
+ src.commit = sha;
+ return { status: 'pinned', name: plugin.name, tag, sha };
+ }
+
+ const head = defaultBranchHeadSha(owner, repo);
+ // Explicitly clear any stale `ref` from a previous run: if the plugin
+ // loses its tag (e.g., deleted for a security rewrite) we must not leave
+ // the old tag reference around, since downstream installers that prefer
+ // `ref` would otherwise ignore the new commit pin.
+ delete src.ref;
+ src.commit = head;
+ return { status: 'fallback', name: plugin.name, wantedTag: tag, sha: head };
+ }
+
+ function main() {
+ const raw = fs.readFileSync(MARKETPLACE_PATH, 'utf8');
+ const data = JSON.parse(raw);
+
+ const pinned = [];
+ const fallbacks = [];
+ const errors = [];
+
+ for (const plugin of data.plugins) {
+ try {
+ const result = pinPlugin(plugin);
+ if (result.status === 'pinned') {
+ pinned.push(result);
+ console.log(
+ `[OK] ${result.name} -> ${result.tag} (${result.sha.slice(0, 10)})`,
+ );
+ } else if (result.status === 'fallback') {
+ fallbacks.push(result);
+ console.log(
+ `[WARN] ${result.name} has no tag ${result.wantedTag}; pinning default-branch@${result.sha.slice(0, 10)}`,
+ );
+ }
+ } catch (err) {
+ errors.push({ name: plugin.name, error: err.message });
+ console.error(`[ERROR] ${plugin.name}: ${err.message}`);
+ }
+ }
+
+ const out = JSON.stringify(data, null, 2) + '\n';
+
+ if (DRY_RUN) {
+ console.log('\n[DRY-RUN] Not writing marketplace.json');
+ } else if (errors.length === 0) {
+ fs.writeFileSync(MARKETPLACE_PATH, out);
+ console.log(`\n[OK] Wrote ${MARKETPLACE_PATH}`);
+ } else {
+ console.log(
+ '\n[WARN] Not writing marketplace.json because some plugins failed; re-run after resolving errors.',
+ );
+ }
+
+ console.log(
+ `\nSummary: ${pinned.length} pinned to tags, ${fallbacks.length} fell back to default-branch SHA, ${errors.length} errors`,
+ );
+ if (fallbacks.length > 0) {
+ console.log('\nFallback plugins (no release tag yet):');
+ for (const f of fallbacks) {
+ console.log(` - ${f.name}: wanted ${f.wantedTag}, pinned ${f.sha}`);
+ }
+ }
+ if (errors.length > 0) {
+ console.log('\nFailed plugins:');
+ for (const e of errors) {
+ console.log(` - ${e.name}: ${e.error}`);
+ }
+ return 1;
+ }
+ return 0;
+ }
+
+ if (require.main === module) {
+ try {
+ const code = main();
+ process.exit(code);
+ } catch (err) {
+ console.error(`[ERROR] ${err.message}`);
+ process.exit(1);
+ }
+ }
+
+ module.exports = {
+ parseOrgRepo,
+ resolveTagSha,
+ defaultBranchHeadSha,
+ pinPlugin,
+ setGhRunner,
+ };
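
A sketch of driving the new script's exported pieces through its `setGhRunner` test seam; the export names come from the diff, while the stubbed responses and the plugin entry are invented fixtures:

```js
const pin = require('./scripts/pin-marketplace'); // path from the script's usage note

// Stub the gh CLI: pretend the tag exists as a lightweight tag pointing at a commit.
pin.setGhRunner((args) => {
  if (args[1] && args[1].includes('/git/ref/tags/')) {
    return JSON.stringify({ object: { type: 'commit', sha: 'a'.repeat(40) } });
  }
  throw new Error('unexpected gh call: ' + args.join(' '));
});

const plugin = {
  name: 'example-plugin', // hypothetical, not one of the 20 real entries
  version: '1.0.0',
  source: { source: 'url', url: 'https://github.com/agent-sh/example-plugin.git' },
};

console.log(pin.pinPlugin(plugin));
// -> { status: 'pinned', name: 'example-plugin', tag: 'v1.0.0', sha: 'aaaa...' }

pin.setGhRunner(null); // restore the real gh-backed runner
```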
package/site/content.json CHANGED
@@ -5,7 +5,7 @@
  "url": "https://agent-sh.github.io/agentsys",
  "repo": "https://github.com/agent-sh/agentsys",
  "npm": "https://www.npmjs.com/package/agentsys",
- "version": "5.9.1",
+ "version": "5.10.0",
  "author": "Avi Fenesh",
  "author_url": "https://github.com/avifenesh"
  },