skillsets 0.3.0 → 0.4.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -11,6 +11,7 @@ cli/
11
11
  │ ├── commands/ # Command implementations
12
12
  │ │ ├── list.ts
13
13
  │ │ ├── search.ts
14
+ │ │ ├── view.ts
14
15
  │ │ ├── install.ts
15
16
  │ │ ├── init.ts
16
17
  │ │ ├── audit.ts
@@ -42,6 +43,7 @@ cli/
42
43
  |------|---------|---------------|
43
44
  | `list.ts` | Browse all skillsets with live stats | [Docs](./docs_cli/commands/list.md) |
44
45
  | `search.ts` | Fuzzy search by name, description, tags | [Docs](./docs_cli/commands/search.md) |
46
+ | `view.ts` | View a skillset README before installing | [Docs](./docs_cli/commands/view.md) |
45
47
  | `install.ts` | Install skillset via degit + MCP warning + verify checksums | [Docs](./docs_cli/commands/install.md) |
46
48
  | `init.ts` | Scaffold new skillset for contribution | [Docs](./docs_cli/commands/init.md) |
47
49
  | `audit.ts` | Validate skillset + MCP servers before submission | [Docs](./docs_cli/commands/audit.md) |
@@ -58,5 +60,11 @@ cli/
58
60
  | `versions.ts` | Semver comparison | [Docs](./docs_cli/lib/versions.md) |
59
61
  | `validate-mcp.ts` | MCP server bidirectional validation | [Docs](./docs_cli/lib/validate-mcp.md) |
60
62
 
63
+ ### Types
64
+ | File | Purpose | Documentation |
65
+ |------|---------|---------------|
66
+ | `index.ts` | SearchIndex, Skillset interfaces | [Docs](./docs_cli/types/index.md) |
67
+ | `degit.d.ts` | TypeScript declarations for degit package | [Docs](./docs_cli/types/degit.d.md) |
68
+
61
69
  ## Related Documentation
62
70
  - [CLI Style Guide](../.claude/resources/cli_styleguide.md)
@@ -16,13 +16,10 @@ const TEXT_EXTENSIONS = new Set([
16
16
  '.gitignore', '.editorconfig',
17
17
  ]);
18
18
  const SECRET_PATTERNS = [
19
- { name: 'API Key', pattern: /api[_-]?key\s*[:=]\s*['"]?[a-zA-Z0-9]{20,}/gi },
20
- { name: 'Password', pattern: /password\s*[:=]\s*['"]?[^'"\s]{8,}/gi },
21
- { name: 'Secret', pattern: /secret\s*[:=]\s*['"]?[a-zA-Z0-9]{20,}/gi },
22
- { name: 'Token', pattern: /token\s*[:=]\s*['"]?[a-zA-Z0-9]{20,}/gi },
23
19
  { name: 'AWS Key', pattern: /AKIA[0-9A-Z]{16}/g },
24
20
  { name: 'GitHub Token', pattern: /ghp_[a-zA-Z0-9]{36}/g },
25
21
  { name: 'OpenAI Key', pattern: /sk-[a-zA-Z0-9]{48}/g },
22
+ { name: 'Anthropic Key', pattern: /sk-ant-[a-zA-Z0-9_-]{20,}/g },
26
23
  ];
27
24
  function getAllFiles(dir, baseDir = dir) {
28
25
  const files = [];
@@ -64,7 +61,7 @@ function isBinaryFile(filePath) {
64
61
  }
65
62
  }
66
63
  function scanReadmeLinks(cwd) {
67
- const readmePath = join(cwd, 'README.md');
64
+ const readmePath = join(cwd, 'content', 'README.md');
68
65
  if (!existsSync(readmePath))
69
66
  return [];
70
67
  const relativeLinks = [];
@@ -88,8 +85,8 @@ function scanForSecrets(dir) {
88
85
  const secrets = [];
89
86
  const files = getAllFiles(dir);
90
87
  for (const { path: filePath } of files) {
91
- const ext = filePath.substring(filePath.lastIndexOf('.')).toLowerCase();
92
- if (!['.md', '.txt', '.json', '.yaml', '.yml', '.js', '.ts', '.py'].includes(ext))
88
+ const fullPath = join(dir, filePath);
89
+ if (isBinaryFile(fullPath))
93
90
  continue;
94
91
  if (filePath.includes('AUDIT_REPORT'))
95
92
  continue;
@@ -166,9 +163,8 @@ function formatSize(bytes) {
166
163
  return `${(bytes / 1024).toFixed(1)} KB`;
167
164
  return `${(bytes / (1024 * 1024)).toFixed(1)} MB`;
168
165
  }
169
- function generateReport(results, cwd, enforceMcp = false) {
170
- const timestamp = new Date().toISOString();
171
- const allPassed = results.manifest.status === 'PASS' &&
166
+ function isAuditPassing(results, enforceMcp) {
167
+ return results.manifest.status === 'PASS' &&
172
168
  results.requiredFiles.status === 'PASS' &&
173
169
  results.contentStructure.status === 'PASS' &&
174
170
  results.fileSize.status !== 'FAIL' &&
@@ -176,16 +172,20 @@ function generateReport(results, cwd, enforceMcp = false) {
176
172
  results.readmeLinks.status === 'PASS' &&
177
173
  results.versionCheck.status === 'PASS' &&
178
174
  (enforceMcp ? results.mcpServers.status === 'PASS' : true);
175
+ }
176
+ function statusIcon(status) {
177
+ if (status === 'PASS')
178
+ return '✓ PASS';
179
+ if (status === 'WARNING')
180
+ return '⚠ WARNING';
181
+ return '✗ FAIL';
182
+ }
183
+ function generateReport(results, cwd, enforceMcp = false) {
184
+ const timestamp = new Date().toISOString();
185
+ const allPassed = isAuditPassing(results, enforceMcp);
179
186
  const submissionType = results.isUpdate
180
187
  ? `Update (${results.existingVersion} → ${results.skillsetVersion})`
181
188
  : 'New submission';
182
- const statusIcon = (status) => {
183
- if (status === 'PASS')
184
- return '✓ PASS';
185
- if (status === 'WARNING')
186
- return '⚠ WARNING';
187
- return '✗ FAIL';
188
- };
189
189
  let report = `# Audit Report
190
190
 
191
191
  **Generated:** ${timestamp}
@@ -332,16 +332,16 @@ export async function audit(options = {}) {
332
332
  }
333
333
  // 2. Required files
334
334
  spinner.text = 'Checking required files...';
335
- const hasReadme = existsSync(join(cwd, 'README.md'));
336
335
  const hasContent = existsSync(join(cwd, 'content'));
336
+ const hasReadme = existsSync(join(cwd, 'content', 'README.md'));
337
337
  const hasSkillsetYaml = existsSync(join(cwd, 'skillset.yaml'));
338
338
  const missingFiles = [];
339
339
  if (!hasSkillsetYaml)
340
340
  missingFiles.push('skillset.yaml');
341
- if (!hasReadme)
342
- missingFiles.push('README.md');
343
341
  if (!hasContent)
344
342
  missingFiles.push('content/');
343
+ if (!hasReadme)
344
+ missingFiles.push('content/README.md');
345
345
  if (missingFiles.length === 0) {
346
346
  results.requiredFiles = { status: 'PASS', details: 'All present' };
347
347
  }
@@ -356,18 +356,18 @@ export async function audit(options = {}) {
356
356
  spinner.text = 'Verifying content structure...';
357
357
  const hasClaudeDir = existsSync(join(cwd, 'content', '.claude'));
358
358
  const hasClaudeMd = existsSync(join(cwd, 'content', 'CLAUDE.md'));
359
- if (hasClaudeDir || hasClaudeMd) {
360
- const found = [hasClaudeDir && '.claude/', hasClaudeMd && 'CLAUDE.md'].filter(Boolean);
359
+ if (hasClaudeDir && hasClaudeMd) {
361
360
  results.contentStructure = {
362
361
  status: 'PASS',
363
- details: `Found: ${found.join(', ')}`,
362
+ details: 'Found: .claude/, CLAUDE.md',
364
363
  };
365
364
  }
366
365
  else {
366
+ const missing = [!hasClaudeDir && '.claude/', !hasClaudeMd && 'CLAUDE.md'].filter(Boolean);
367
367
  results.contentStructure = {
368
368
  status: 'FAIL',
369
- details: 'No .claude/ or CLAUDE.md',
370
- findings: 'content/ must contain either .claude/ directory or CLAUDE.md file',
369
+ details: `Missing: ${missing.join(', ')}`,
370
+ findings: 'content/ must contain both .claude/ directory and CLAUDE.md file',
371
371
  };
372
372
  }
373
373
  // 4. File size check
@@ -495,32 +495,30 @@ export async function audit(options = {}) {
495
495
  }
496
496
  spinner.succeed(options.check ? 'Validation complete' : 'Audit complete');
497
497
  // Summary
498
- const allPassed = results.manifest.status === 'PASS' &&
499
- results.requiredFiles.status === 'PASS' &&
500
- results.contentStructure.status === 'PASS' &&
501
- results.fileSize.status !== 'FAIL' &&
502
- results.secrets.status === 'PASS' &&
503
- results.readmeLinks.status === 'PASS' &&
504
- results.versionCheck.status === 'PASS' &&
505
- (options.check ? results.mcpServers.status === 'PASS' : true);
506
- console.log('\n' + chalk.bold('Audit Summary:'));
507
- console.log('');
508
- const icon = (status) => {
498
+ const allPassed = isAuditPassing(results, !!options.check);
499
+ const colorIcon = (status) => {
509
500
  if (status === 'PASS')
510
501
  return chalk.green('✓');
511
502
  if (status === 'WARNING')
512
503
  return chalk.yellow('⚠');
513
504
  return chalk.red('✗');
514
505
  };
515
- console.log(` ${icon(results.manifest.status)} Manifest: ${results.manifest.details}`);
516
- console.log(` ${icon(results.requiredFiles.status)} Required Files: ${results.requiredFiles.details}`);
517
- console.log(` ${icon(results.contentStructure.status)} Content Structure: ${results.contentStructure.details}`);
518
- console.log(` ${icon(results.fileSize.status)} File Sizes: ${results.fileSize.details}`);
519
- console.log(` ${icon(results.binary.status)} Binary Files: ${results.binary.details}`);
520
- console.log(` ${icon(results.secrets.status)} Secrets: ${results.secrets.details}`);
521
- console.log(` ${icon(results.readmeLinks.status)} README Links: ${results.readmeLinks.details}`);
522
- console.log(` ${icon(results.versionCheck.status)} Version: ${results.versionCheck.details}`);
523
- console.log(` ${icon(results.mcpServers.status)} MCP Servers: ${results.mcpServers.details}`);
506
+ const checks = [
507
+ [results.manifest, 'Manifest'],
508
+ [results.requiredFiles, 'Required Files'],
509
+ [results.contentStructure, 'Content Structure'],
510
+ [results.fileSize, 'File Sizes'],
511
+ [results.binary, 'Binary Files'],
512
+ [results.secrets, 'Secrets'],
513
+ [results.readmeLinks, 'README Links'],
514
+ [results.versionCheck, 'Version'],
515
+ [results.mcpServers, 'MCP Servers'],
516
+ ];
517
+ console.log('\n' + chalk.bold('Audit Summary:'));
518
+ console.log('');
519
+ for (const [result, label] of checks) {
520
+ console.log(` ${colorIcon(result.status)} ${label}: ${result.details}`);
521
+ }
524
522
  console.log('');
525
523
  if (allPassed) {
526
524
  console.log(chalk.green('✓ READY FOR SUBMISSION'));
@@ -4,7 +4,9 @@ import { input, confirm, checkbox } from '@inquirer/prompts';
4
4
  import { existsSync, mkdirSync, copyFileSync, readdirSync, writeFileSync } from 'fs';
5
5
  import { join } from 'path';
6
6
  import degit from 'degit';
7
+ import { execSync } from 'child_process';
7
8
  const SKILLSET_YAML_TEMPLATE = `schema_version: "1.0"
9
+ batch_id: "{{BATCH_ID}}"
8
10
 
9
11
  # Identity
10
12
  name: "{{NAME}}"
@@ -95,6 +97,48 @@ function copyDirRecursive(src, dest) {
95
97
  }
96
98
  export async function init(options) {
97
99
  console.log(chalk.blue('\n📦 Initialize a new skillset submission\n'));
100
+ // 1. Verify gh CLI is available and authenticated
101
+ try {
102
+ execSync('gh auth status', { stdio: 'pipe' });
103
+ }
104
+ catch {
105
+ console.error(chalk.red('Error: gh CLI not authenticated.'));
106
+ console.error('Install: https://cli.github.com');
107
+ console.error('Then run: gh auth login');
108
+ process.exit(1);
109
+ }
110
+ // 2. Get GitHub user info (verified identity)
111
+ let login;
112
+ let id;
113
+ try {
114
+ const userJson = execSync('gh api user', { encoding: 'utf-8' });
115
+ const userData = JSON.parse(userJson);
116
+ login = userData.login;
117
+ id = userData.id;
118
+ }
119
+ catch (error) {
120
+ console.error(chalk.red('Error: Failed to get GitHub user info.'));
121
+ console.error('Please ensure gh CLI is properly authenticated.');
122
+ process.exit(1);
123
+ }
124
+ // 3. Look up reservation
125
+ let batchId;
126
+ try {
127
+ const res = await fetch(`https://skillsets.cc/api/reservations/lookup?githubId=${encodeURIComponent(String(id))}`);
128
+ const lookupData = await res.json();
129
+ if (!lookupData.batchId) {
130
+ console.error(chalk.red('No active reservation found.'));
131
+ console.error('Visit https://skillsets.cc to claim a slot first.');
132
+ process.exit(1);
133
+ }
134
+ batchId = lookupData.batchId;
135
+ console.log(chalk.green(`\nReservation found: ${batchId}`));
136
+ }
137
+ catch (error) {
138
+ console.error(chalk.red('Error: Failed to look up reservation.'));
139
+ console.error('Please check your network connection and try again.');
140
+ process.exit(1);
141
+ }
98
142
  const cwd = process.cwd();
99
143
  // Check if already initialized
100
144
  if (existsSync(join(cwd, 'skillset.yaml'))) {
@@ -132,6 +176,7 @@ export async function init(options) {
132
176
  });
133
177
  const authorHandle = await input({
134
178
  message: 'GitHub handle (e.g., @username):',
179
+ default: `@${login}`,
135
180
  validate: (value) => {
136
181
  if (!/^@[A-Za-z0-9_-]+$/.test(value)) {
137
182
  return 'Handle must start with @ followed by alphanumeric characters';
@@ -142,6 +187,15 @@ export async function init(options) {
142
187
  const authorUrl = await input({
143
188
  message: 'Author URL (GitHub profile or website):',
144
189
  default: `https://github.com/${authorHandle.slice(1)}`,
190
+ validate: (value) => {
191
+ try {
192
+ new URL(value);
193
+ return true;
194
+ }
195
+ catch {
196
+ return 'Must be a valid URL';
197
+ }
198
+ },
145
199
  });
146
200
  const productionUrl = await input({
147
201
  message: 'Production URL (live deployment, repo, or case study):',
@@ -172,13 +226,11 @@ export async function init(options) {
172
226
  });
173
227
  const tags = tagsInput.split(',').map((t) => t.trim());
174
228
  // Auto-detect existing files
175
- const detectedFiles = [];
176
- if (existsSync(join(cwd, '.claude'))) {
177
- detectedFiles.push('.claude/');
178
- }
179
- if (existsSync(join(cwd, 'CLAUDE.md'))) {
180
- detectedFiles.push('CLAUDE.md');
181
- }
229
+ const candidateFiles = ['CLAUDE.md', 'README.md', '.claude/', '.mcp.json', 'docker/'];
230
+ const detectedFiles = candidateFiles.filter((f) => {
231
+ const checkPath = f.endsWith('/') ? f.slice(0, -1) : f;
232
+ return existsSync(join(cwd, checkPath));
233
+ });
182
234
  let filesToCopy = [];
183
235
  if (detectedFiles.length > 0) {
184
236
  console.log(chalk.green('\n✓ Detected existing skillset files:'));
@@ -209,6 +261,7 @@ export async function init(options) {
209
261
  // Generate skillset.yaml
210
262
  const tagsYaml = tags.map((t) => ` - "${t}"`).join('\n');
211
263
  const skillsetYaml = SKILLSET_YAML_TEMPLATE
264
+ .replace('{{BATCH_ID}}', batchId)
212
265
  .replace('{{NAME}}', name)
213
266
  .replace('{{DESCRIPTION}}', description)
214
267
  .replace('{{AUTHOR_HANDLE}}', authorHandle)
@@ -216,13 +269,13 @@ export async function init(options) {
216
269
  .replace('{{PRODUCTION_URL}}', productionUrl)
217
270
  .replace('{{TAGS}}', tagsYaml);
218
271
  writeFileSync(join(cwd, 'skillset.yaml'), skillsetYaml);
219
- // Generate README.md (if not copying existing)
220
- if (!existsSync(join(cwd, 'README.md'))) {
272
+ // Generate content/README.md (if not copying existing)
273
+ if (!existsSync(join(cwd, 'content', 'README.md'))) {
221
274
  const readme = README_TEMPLATE
222
275
  .replace(/\{\{NAME\}\}/g, name)
223
276
  .replace(/\{\{DESCRIPTION\}\}/g, description)
224
277
  .replace(/\{\{AUTHOR_HANDLE\}\}/g, authorHandle);
225
- writeFileSync(join(cwd, 'README.md'), readme);
278
+ writeFileSync(join(cwd, 'content', 'README.md'), readme);
226
279
  }
227
280
  // Generate PROOF.md
228
281
  const proof = PROOF_TEMPLATE.replace('{{PRODUCTION_URL}}', productionUrl);
@@ -240,9 +293,9 @@ export async function init(options) {
240
293
  // Summary
241
294
  console.log(chalk.green('\n✓ Initialized skillset submission:\n'));
242
295
  console.log(' skillset.yaml - Manifest (edit as needed)');
243
- console.log(' README.md - Documentation');
244
296
  console.log(' PROOF.md - Production evidence (add details)');
245
297
  console.log(' content/ - Installable files');
298
+ console.log(' ├── README.md - Documentation');
246
299
  if (filesToCopy.length > 0) {
247
300
  filesToCopy.forEach((f) => console.log(` └── ${f}`));
248
301
  }
@@ -6,6 +6,10 @@ import { detectConflicts, backupFiles } from '../lib/filesystem.js';
6
6
  import { verifyChecksums } from '../lib/checksum.js';
7
7
  import { fetchSkillsetMetadata } from '../lib/api.js';
8
8
  import { REGISTRY_REPO, DOWNLOADS_URL } from '../lib/constants.js';
9
+ import { mkdtemp, rm, cp, readdir } from 'fs/promises';
10
+ import { existsSync } from 'fs';
11
+ import { tmpdir } from 'os';
12
+ import { join } from 'path';
9
13
  function formatMcpWarning(mcpServers, skillsetId) {
10
14
  let output = chalk.yellow('\n⚠ This skillset includes MCP servers:\n');
11
15
  const nativeServers = mcpServers.filter(s => s.type !== 'docker');
@@ -36,6 +40,13 @@ function formatMcpWarning(mcpServers, skillsetId) {
36
40
  }
37
41
  export async function install(skillsetId, options) {
38
42
  const spinner = ora(`Installing ${skillsetId}...`).start();
43
+ // Validate skillsetId format
44
+ if (!/^@[A-Za-z0-9_-]+\/[A-Za-z0-9_-]+$/.test(skillsetId)) {
45
+ spinner.fail('Invalid skillset ID');
46
+ console.log(chalk.red('\nExpected format: @author/name'));
47
+ console.log(chalk.gray('Example: @supercollectible/Valence'));
48
+ return;
49
+ }
39
50
  // Check for conflicts
40
51
  const conflicts = await detectConflicts(process.cwd());
41
52
  if (conflicts.length > 0 && !options.force && !options.backup) {
@@ -53,6 +64,7 @@ export async function install(skillsetId, options) {
53
64
  await backupFiles(conflicts, process.cwd());
54
65
  }
55
66
  // Fetch metadata and check for MCP servers BEFORE degit
67
+ let metadataFetchFailed = false;
56
68
  spinner.text = 'Fetching skillset metadata...';
57
69
  try {
58
70
  const metadata = await fetchSkillsetMetadata(skillsetId);
@@ -79,30 +91,76 @@ export async function install(skillsetId, options) {
79
91
  }
80
92
  }
81
93
  catch {
82
- // If metadata fetch fails, continue without MCP check
83
- // (registry might be down, don't block install)
94
+ // Record that the metadata fetch failed so the downloaded content is inspected for MCP configs after download
95
+ metadataFetchFailed = true;
84
96
  }
85
- // Install using degit (extract content/ subdirectory)
97
+ // Install to temp directory first (verify before writing to cwd)
86
98
  spinner.text = 'Downloading skillset...';
87
- const emitter = degit(`${REGISTRY_REPO}/skillsets/${skillsetId}/content`, {
88
- cache: false,
89
- force: true,
90
- verbose: false,
91
- });
92
- await emitter.clone(process.cwd());
93
- // Verify checksums
94
- spinner.text = 'Verifying checksums...';
95
- const result = await verifyChecksums(skillsetId, process.cwd());
96
- if (!result.valid) {
97
- spinner.fail('Checksum verification failed - files may be corrupted');
98
- console.log(chalk.red('\nInstallation aborted due to checksum mismatch.'));
99
- console.log(chalk.yellow('This could indicate:'));
100
- console.log(' - Network issues during download');
101
- console.log(' - Corrupted files in the registry');
102
- console.log(' - Tampering with the downloaded content');
103
- console.log(chalk.cyan('\nTo retry:'));
104
- console.log(` npx skillsets install ${skillsetId} --force`);
105
- process.exit(1);
99
+ const tempDir = await mkdtemp(join(tmpdir(), 'skillsets-'));
100
+ try {
101
+ const emitter = degit(`${REGISTRY_REPO}/skillsets/${skillsetId}/content`, {
102
+ cache: false,
103
+ force: true,
104
+ verbose: false,
105
+ });
106
+ await emitter.clone(tempDir);
107
+ // Post-install MCP check: if metadata fetch failed, inspect downloaded content
108
+ if (metadataFetchFailed) {
109
+ const hasMcpJson = existsSync(join(tempDir, '.mcp.json'));
110
+ const hasClaudeSettings = existsSync(join(tempDir, '.claude', 'settings.json'));
111
+ if (hasMcpJson || hasClaudeSettings) {
112
+ spinner.stop();
113
+ if (!process.stdin.isTTY && !options.acceptMcp) {
114
+ console.log(chalk.red('This skillset includes MCP servers. Use --accept-mcp to install in non-interactive environments.'));
115
+ await rm(tempDir, { recursive: true, force: true });
116
+ process.exit(1);
117
+ return;
118
+ }
119
+ if (!options.acceptMcp) {
120
+ console.log(chalk.yellow('\n⚠ This skillset may include MCP servers (metadata unavailable for pre-check).'));
121
+ console.log(chalk.cyan(`\n Review before installing:\n https://github.com/skillsets-cc/main/tree/main/skillsets/${skillsetId}/content\n`));
122
+ const accepted = await confirm({
123
+ message: 'Continue installation?',
124
+ default: false,
125
+ });
126
+ if (!accepted) {
127
+ console.log(chalk.gray('\nInstallation cancelled.'));
128
+ await rm(tempDir, { recursive: true, force: true });
129
+ return;
130
+ }
131
+ }
132
+ spinner.start('Verifying checksums...');
133
+ }
134
+ }
135
+ // Verify checksums against temp directory
136
+ spinner.text = 'Verifying checksums...';
137
+ const result = await verifyChecksums(skillsetId, tempDir);
138
+ if (!result.valid) {
139
+ spinner.fail('Checksum verification failed - files may be corrupted');
140
+ console.log(chalk.red('\nInstallation aborted due to checksum mismatch.'));
141
+ console.log(chalk.yellow('This could indicate:'));
142
+ console.log(' - Network issues during download');
143
+ console.log(' - Corrupted files in the registry');
144
+ console.log(' - Tampering with the downloaded content');
145
+ console.log(chalk.cyan('\nTo retry:'));
146
+ console.log(` npx skillsets install ${skillsetId} --force`);
147
+ await rm(tempDir, { recursive: true, force: true });
148
+ process.exit(1);
149
+ }
150
+ // Checksums valid — move verified content to cwd
151
+ spinner.text = 'Installing verified content...';
152
+ const entries = await readdir(tempDir, { withFileTypes: true });
153
+ for (const entry of entries) {
154
+ await cp(join(tempDir, entry.name), join(process.cwd(), entry.name), {
155
+ recursive: true,
156
+ force: true,
157
+ });
158
+ }
159
+ await rm(tempDir, { recursive: true, force: true });
160
+ }
161
+ catch (error) {
162
+ await rm(tempDir, { recursive: true, force: true }).catch(() => { });
163
+ throw error;
106
164
  }
107
165
  spinner.succeed(`Successfully installed ${skillsetId}`);
108
166
  // Track download (non-blocking, silent fail)
@@ -7,7 +7,7 @@ import yaml from 'js-yaml';
7
7
  import { tmpdir } from 'os';
8
8
  import { fetchSkillsetMetadata } from '../lib/api.js';
9
9
  import { compareVersions } from '../lib/versions.js';
10
- const REGISTRY_REPO = 'skillsets-cc/main';
10
+ import { REGISTRY_REPO } from '../lib/constants.js';
11
11
  const REGISTRY_URL = `https://github.com/${REGISTRY_REPO}`;
12
12
  function checkGhCli() {
13
13
  try {
@@ -116,7 +116,7 @@ export async function submit() {
116
116
  }
117
117
  console.log(chalk.green('✓ Audit report passing'));
118
118
  // 6. Check required files
119
- const requiredFiles = ['skillset.yaml', 'README.md', 'PROOF.md', 'AUDIT_REPORT.md', 'content'];
119
+ const requiredFiles = ['skillset.yaml', 'PROOF.md', 'AUDIT_REPORT.md', 'content'];
120
120
  for (const file of requiredFiles) {
121
121
  if (!existsSync(join(cwd, file))) {
122
122
  console.log(chalk.red(`✗ Missing required: ${file}`));
@@ -181,7 +181,7 @@ export async function submit() {
181
181
  mkdirSync(skillsetDir, { recursive: true });
182
182
  // Copy files
183
183
  spinner.text = 'Copying skillset files...';
184
- const filesToCopy = ['skillset.yaml', 'README.md', 'PROOF.md', 'AUDIT_REPORT.md', 'content'];
184
+ const filesToCopy = ['skillset.yaml', 'PROOF.md', 'AUDIT_REPORT.md', 'content'];
185
185
  for (const file of filesToCopy) {
186
186
  const src = join(cwd, file);
187
187
  const dest = join(skillsetDir, file);
@@ -0,0 +1 @@
1
+ export declare function view(skillsetId: string): Promise<void>;
@@ -0,0 +1,28 @@
1
+ import chalk from 'chalk';
2
+ import ora from 'ora';
3
+ import { fetchSkillsetMetadata } from '../lib/api.js';
4
+ import { GITHUB_RAW_BASE } from '../lib/constants.js';
5
+ export async function view(skillsetId) {
6
+ const spinner = ora('Fetching README...').start();
7
+ const metadata = await fetchSkillsetMetadata(skillsetId);
8
+ if (!metadata) {
9
+ spinner.fail(`Skillset '${skillsetId}' not found`);
10
+ throw new Error(`Skillset '${skillsetId}' not found`);
11
+ }
12
+ const [namespace, name] = skillsetId.split('/');
13
+ const encodedPath = encodeURIComponent(namespace) + '/' + encodeURIComponent(name);
14
+ const url = `${GITHUB_RAW_BASE}/skillsets/${encodedPath}/content/README.md`;
15
+ const response = await fetch(url);
16
+ if (!response.ok) {
17
+ spinner.fail(`Could not fetch README for '${skillsetId}'`);
18
+ throw new Error(`Could not fetch README for '${skillsetId}'`);
19
+ }
20
+ spinner.stop();
21
+ const readme = await response.text();
22
+ console.log();
23
+ console.log(chalk.bold(` ${skillsetId}`));
24
+ console.log();
25
+ console.log(chalk.dim(' ' + '─'.repeat(50)));
26
+ console.log();
27
+ console.log(readme);
28
+ }
package/dist/index.js CHANGED
@@ -2,6 +2,7 @@
2
2
  import { program } from 'commander';
3
3
  import { search } from './commands/search.js';
4
4
  import { list } from './commands/list.js';
5
+ import { view } from './commands/view.js';
5
6
  import { install } from './commands/install.js';
6
7
  import { init } from './commands/init.js';
7
8
  import { audit } from './commands/audit.js';
@@ -40,6 +41,18 @@ program
40
41
  handleError(error);
41
42
  }
42
43
  });
44
+ program
45
+ .command('view')
46
+ .description('View a skillset README before installing')
47
+ .argument('<skillsetId>', 'Skillset ID (e.g., @user/skillset-name)')
48
+ .action(async (skillsetId) => {
49
+ try {
50
+ await view(skillsetId);
51
+ }
52
+ catch (error) {
53
+ handleError(error);
54
+ }
55
+ });
43
56
  program
44
57
  .command('install')
45
58
  .description('Install a skillset to the current directory')
@@ -6,7 +6,7 @@ import { fetchSkillsetMetadata } from './api.js';
6
6
  * Computes SHA-256 checksum for a file.
7
7
  */
8
8
  export async function computeFileChecksum(filePath) {
9
- const content = await fs.readFile(filePath, 'utf-8');
9
+ const content = await fs.readFile(filePath);
10
10
  return crypto.createHash('sha256').update(content).digest('hex');
11
11
  }
12
12
  /**
@@ -3,6 +3,7 @@ export declare const SEARCH_INDEX_URL = "https://skillsets.cc/search-index.json"
3
3
  export declare const STATS_URL = "https://skillsets.cc/api/stats/counts";
4
4
  export declare const DOWNLOADS_URL = "https://skillsets.cc/api/downloads";
5
5
  export declare const REGISTRY_REPO = "skillsets-cc/main";
6
+ export declare const GITHUB_RAW_BASE = "https://raw.githubusercontent.com/skillsets-cc/main/main";
6
7
  export declare const CACHE_TTL_MS: number;
7
8
  export declare const DEFAULT_SEARCH_LIMIT = 10;
8
9
  export declare const BACKUP_DIR_NAME = ".claude.backup";
@@ -3,6 +3,7 @@ export const SEARCH_INDEX_URL = `${CDN_BASE_URL}/search-index.json`;
3
3
  export const STATS_URL = `${CDN_BASE_URL}/api/stats/counts`;
4
4
  export const DOWNLOADS_URL = `${CDN_BASE_URL}/api/downloads`;
5
5
  export const REGISTRY_REPO = 'skillsets-cc/main';
6
+ export const GITHUB_RAW_BASE = `https://raw.githubusercontent.com/${REGISTRY_REPO}/main`;
6
7
  export const CACHE_TTL_MS = 60 * 60 * 1000; // 1 hour
7
8
  export const DEFAULT_SEARCH_LIMIT = 10;
8
9
  export const BACKUP_DIR_NAME = '.claude.backup';
@@ -49,22 +49,18 @@ export function validateMcpServers(skillsetDir) {
49
49
  return { valid: errors.length === 0, errors };
50
50
  }
51
51
  /**
52
- * Collect MCP servers from content files (.mcp.json, .claude/settings.json, docker configs)
52
+ * Parse native MCP servers from a JSON file containing an `mcpServers` key.
53
+ * Deduplicates against existing servers by name.
53
54
  */
54
- function collectContentServers(skillsetDir, errors) {
55
- const servers = [];
56
- const contentDir = join(skillsetDir, 'content');
57
- if (!existsSync(contentDir)) {
58
- return servers;
59
- }
60
- // 1. Check .mcp.json
61
- const mcpJsonPath = join(contentDir, '.mcp.json');
62
- if (existsSync(mcpJsonPath)) {
63
- try {
64
- const content = readFileSync(mcpJsonPath, 'utf-8');
65
- const data = JSON.parse(content);
66
- if (data.mcpServers && typeof data.mcpServers === 'object') {
67
- for (const [name, config] of Object.entries(data.mcpServers)) {
55
+ function parseNativeServersFromJson(filePath, servers, errors) {
56
+ if (!existsSync(filePath))
57
+ return;
58
+ try {
59
+ const content = readFileSync(filePath, 'utf-8');
60
+ const data = JSON.parse(content);
61
+ if (data.mcpServers && typeof data.mcpServers === 'object') {
62
+ for (const [name, config] of Object.entries(data.mcpServers)) {
63
+ if (!servers.some(s => s.name === name && s.source === 'native')) {
68
64
  servers.push({
69
65
  name,
70
66
  source: 'native',
@@ -75,58 +71,29 @@ function collectContentServers(skillsetDir, errors) {
75
71
  }
76
72
  }
77
73
  }
78
- catch (error) {
79
- errors.push(`Failed to parse .mcp.json: ${error.message}`);
80
- }
81
74
  }
82
- // 2. Check .claude/settings.json
83
- const settingsPath = join(contentDir, '.claude', 'settings.json');
84
- if (existsSync(settingsPath)) {
85
- try {
86
- const content = readFileSync(settingsPath, 'utf-8');
87
- const data = JSON.parse(content);
88
- if (data.mcpServers && typeof data.mcpServers === 'object') {
89
- for (const [name, config] of Object.entries(data.mcpServers)) {
90
- // Avoid duplicates (same name might be in both files)
91
- if (!servers.some(s => s.name === name && s.source === 'native')) {
92
- servers.push({
93
- name,
94
- source: 'native',
95
- command: config.command,
96
- args: config.args,
97
- url: config.url,
98
- });
99
- }
100
- }
101
- }
102
- }
103
- catch (error) {
104
- errors.push(`Failed to parse .claude/settings.json: ${error.message}`);
105
- }
75
+ catch (error) {
76
+ const label = filePath.split('/').slice(-2).join('/');
77
+ errors.push(`Failed to parse ${label}: ${error.message}`);
106
78
  }
107
- // 3. Check .claude/settings.local.json
108
- const settingsLocalPath = join(contentDir, '.claude', 'settings.local.json');
109
- if (existsSync(settingsLocalPath)) {
110
- try {
111
- const content = readFileSync(settingsLocalPath, 'utf-8');
112
- const data = JSON.parse(content);
113
- if (data.mcpServers && typeof data.mcpServers === 'object') {
114
- for (const [name, config] of Object.entries(data.mcpServers)) {
115
- if (!servers.some(s => s.name === name && s.source === 'native')) {
116
- servers.push({
117
- name,
118
- source: 'native',
119
- command: config.command,
120
- args: config.args,
121
- url: config.url,
122
- });
123
- }
124
- }
125
- }
126
- }
127
- catch (error) {
128
- errors.push(`Failed to parse .claude/settings.local.json: ${error.message}`);
129
- }
79
+ }
80
+ /**
81
+ * Collect MCP servers from content files (.mcp.json, .claude/settings.json, docker configs)
82
+ */
83
+ function collectContentServers(skillsetDir, errors) {
84
+ const servers = [];
85
+ const contentDir = join(skillsetDir, 'content');
86
+ if (!existsSync(contentDir)) {
87
+ return servers;
88
+ }
89
+ // Native MCP server sources (order matters for dedup: first found wins)
90
+ const nativeJsonPaths = [
91
+ join(contentDir, '.mcp.json'),
92
+ join(contentDir, '.claude', 'settings.json'),
93
+ join(contentDir, '.claude', 'settings.local.json'),
94
+ ];
95
+ for (const jsonPath of nativeJsonPaths) {
96
+ parseNativeServersFromJson(jsonPath, servers, errors);
130
97
  }
131
98
  // 4. Check docker/**/*.yaml and docker/**/*.yml for mcp_servers key
132
99
  const dockerDir = join(contentDir, 'docker');
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "skillsets",
3
- "version": "0.3.0",
3
+ "version": "0.4.0",
4
4
  "description": "CLI tool for discovering and installing verified skillsets",
5
5
  "type": "module",
6
6
  "bin": {
@@ -21,7 +21,7 @@
21
21
  "scripts": {
22
22
  "build": "tsc",
23
23
  "dev": "tsc && node dist/index.js",
24
- "test": "vitest",
24
+ "test": "vitest --run",
25
25
  "typecheck": "tsc --noEmit",
26
26
  "prepublishOnly": "npm run build"
27
27
  },