hackmyagent 0.16.0 → 0.16.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (47)
  1. package/dist/.integrity-manifest.json +1 -1
  2. package/dist/arp/intelligence/nanomind-l1.d.ts +30 -0
  3. package/dist/arp/intelligence/nanomind-l1.d.ts.map +1 -1
  4. package/dist/arp/intelligence/nanomind-l1.js +115 -0
  5. package/dist/arp/intelligence/nanomind-l1.js.map +1 -1
  6. package/dist/cli.js +220 -19
  7. package/dist/cli.js.map +1 -1
  8. package/dist/hardening/scanner.d.ts.map +1 -1
  9. package/dist/hardening/scanner.js +125 -4
  10. package/dist/hardening/scanner.js.map +1 -1
  11. package/dist/hardening/taxonomy.d.ts +2 -0
  12. package/dist/hardening/taxonomy.d.ts.map +1 -1
  13. package/dist/hardening/taxonomy.js +5 -0
  14. package/dist/hardening/taxonomy.js.map +1 -1
  15. package/dist/nanomind-core/analyzers/stego-analyzer.d.ts +30 -0
  16. package/dist/nanomind-core/analyzers/stego-analyzer.d.ts.map +1 -0
  17. package/dist/nanomind-core/analyzers/stego-analyzer.js +533 -0
  18. package/dist/nanomind-core/analyzers/stego-analyzer.js.map +1 -0
  19. package/dist/nanomind-core/daemon-lifecycle.d.ts +28 -0
  20. package/dist/nanomind-core/daemon-lifecycle.d.ts.map +1 -0
  21. package/dist/nanomind-core/daemon-lifecycle.js +142 -0
  22. package/dist/nanomind-core/daemon-lifecycle.js.map +1 -0
  23. package/dist/nanomind-core/inference/tme-classifier.d.ts +3 -2
  24. package/dist/nanomind-core/inference/tme-classifier.d.ts.map +1 -1
  25. package/dist/nanomind-core/inference/tme-classifier.js +26 -16
  26. package/dist/nanomind-core/inference/tme-classifier.js.map +1 -1
  27. package/dist/nanomind-core/orchestrate.d.ts.map +1 -1
  28. package/dist/nanomind-core/orchestrate.js +11 -1
  29. package/dist/nanomind-core/orchestrate.js.map +1 -1
  30. package/dist/nanomind-core/scanner-bridge.d.ts.map +1 -1
  31. package/dist/nanomind-core/scanner-bridge.js +6 -0
  32. package/dist/nanomind-core/scanner-bridge.js.map +1 -1
  33. package/dist/plugins/credvault.d.ts.map +1 -1
  34. package/dist/plugins/credvault.js +25 -0
  35. package/dist/plugins/credvault.js.map +1 -1
  36. package/dist/semantic/nanomind-enhancer.d.ts.map +1 -1
  37. package/dist/semantic/nanomind-enhancer.js +206 -0
  38. package/dist/semantic/nanomind-enhancer.js.map +1 -1
  39. package/dist/telemetry/nanomind-feedback.d.ts +43 -0
  40. package/dist/telemetry/nanomind-feedback.d.ts.map +1 -0
  41. package/dist/telemetry/nanomind-feedback.js +104 -0
  42. package/dist/telemetry/nanomind-feedback.js.map +1 -0
  43. package/dist/telemetry/nanomind-telemetry.d.ts +48 -0
  44. package/dist/telemetry/nanomind-telemetry.d.ts.map +1 -0
  45. package/dist/telemetry/nanomind-telemetry.js +123 -0
  46. package/dist/telemetry/nanomind-telemetry.js.map +1 -0
  47. package/package.json +1 -1
package/dist/cli.js CHANGED
@@ -188,7 +188,9 @@ Examples:
188
188
  $ hackmyagent check @publisher/skill --verbose
189
189
  $ hackmyagent check pip:requests
190
190
  $ hackmyagent check pypi:flask
191
- $ hackmyagent check modelcontextprotocol/servers --json`)
191
+ $ hackmyagent check modelcontextprotocol/servers --json
192
+ $ hackmyagent check https://gitlab.com/org/repo
193
+ $ hackmyagent check https://example.com/agent-v1.tar.gz`)
192
194
  .argument('<target>', 'npm package, PyPI package (pip: or pypi: prefix), local path, GitHub repo, or skill identifier')
193
195
  .option('-v, --verbose', 'Show detailed verification info')
194
196
  .option('--json', 'Output as JSON (for scripting/CI)')
@@ -257,6 +259,11 @@ Examples:
257
259
  await checkGitHubRepo(skill, options);
258
260
  return;
259
261
  }
262
+ // Raw URL (non-GitHub): fetch/clone based on content type
263
+ if (looksLikeRawUrl(skill)) {
264
+ await checkRawUrl(skill, options);
265
+ return;
266
+ }
260
267
  // npm package name: download, run full HMA scan, clean up
261
268
  if (looksLikeNpmPackage(skill)) {
262
269
  await checkNpmPackage(skill, options);
@@ -5191,26 +5198,44 @@ Examples:
5191
5198
  });
5192
5199
  program
5193
5200
  .command('check-metadata')
5194
- .description('Export metadata for all security checks by scanning test fixtures (JSON)')
5195
- .option('-d, --directory <dir>', 'Directory to scan for check metadata extraction')
5201
+ .description('Export metadata for all security checks (JSON)')
5202
+ .option('-d, --directory <dir>', 'Scan a specific directory to collect check metadata from findings')
5203
+ .option('--json', 'Output as JSON (default)')
5196
5204
  .action(async (options) => {
5197
- const { getAttackClass } = require('./hardening/taxonomy');
5198
- const targetDir = options.directory || process.cwd();
5199
- // Run a real scan to collect all check metadata from findings
5200
- const scanner = new index_1.HardeningScanner();
5201
- const result = await scanner.scan({ targetDir, autoFix: false, scanDepth: 'deep' });
5205
+ const { getAttackClass, getTaxonomyMap } = require('./hardening/taxonomy');
5206
+ // Build static registry from taxonomy map (covers all known checks)
5207
+ const taxMap = getTaxonomyMap();
5202
5208
  const metadata = {};
5203
- for (const finding of result.findings) {
5204
- if (!metadata[finding.checkId]) {
5205
- metadata[finding.checkId] = {
5206
- checkId: finding.checkId,
5207
- name: finding.name,
5208
- category: finding.category,
5209
- attackClass: getAttackClass(finding.checkId) || '',
5210
- severity: finding.severity,
5211
- fix: finding.fix || '',
5212
- guidance: finding.guidance || '',
5213
- };
5209
+ // Add all checks from taxonomy (the authoritative source of check IDs)
5210
+ for (const checkId of Object.keys(taxMap)) {
5211
+ const prefix = checkId.split('-').slice(0, -1).join('-') || checkId.split('-')[0];
5212
+ metadata[checkId] = {
5213
+ checkId,
5214
+ name: checkId,
5215
+ category: prefix.toLowerCase(),
5216
+ attackClass: taxMap[checkId] || '',
5217
+ severity: '',
5218
+ };
5219
+ }
5220
+ // If a directory is provided, enrich with actual finding data (names, severity, etc.)
5221
+ if (options.directory) {
5222
+ const scanner = new index_1.HardeningScanner();
5223
+ const result = await scanner.scan({ targetDir: options.directory, autoFix: false, scanDepth: 'deep' });
5224
+ for (const finding of result.findings) {
5225
+ if (metadata[finding.checkId]) {
5226
+ metadata[finding.checkId].name = finding.name;
5227
+ metadata[finding.checkId].category = finding.category;
5228
+ metadata[finding.checkId].severity = finding.severity;
5229
+ }
5230
+ else {
5231
+ metadata[finding.checkId] = {
5232
+ checkId: finding.checkId,
5233
+ name: finding.name,
5234
+ category: finding.category,
5235
+ attackClass: getAttackClass(finding.checkId) || '',
5236
+ severity: finding.severity,
5237
+ };
5238
+ }
5214
5239
  }
5215
5240
  }
5216
5241
  writeJsonStdout({ totalChecks: Object.keys(metadata).length, checks: metadata });
@@ -5706,6 +5731,16 @@ function looksLikeGitHubRepo(target) {
5706
5731
  }
5707
5732
  return false;
5708
5733
  }
5734
+ /**
5735
+ * Detect whether a string is an HTTP(S) URL that is NOT a GitHub repo.
5736
+ * GitHub URLs are handled by looksLikeGitHubRepo; this catches everything else:
5737
+ * GitLab, Bitbucket, self-hosted git, raw tarballs, zip archives, single files, etc.
5738
+ */
5739
+ function looksLikeRawUrl(target) {
5740
+ if (looksLikeGitHubRepo(target))
5741
+ return false;
5742
+ return /^https?:\/\/.+/.test(target);
5743
+ }
5709
5744
  /**
5710
5745
  * Parse a GitHub target into org/repo and optional clone URL.
5711
5746
  * Returns { org, repo, cloneUrl }
@@ -6223,6 +6258,172 @@ async function checkPyPiPackage(target, options) {
6223
6258
  await rm(tempDir, { recursive: true, force: true });
6224
6259
  }
6225
6260
  }
6261
+ /**
6262
+ * Fetch a raw URL, detect its type (git repo, tarball, zip, or single file),
6263
+ * download to a temp dir, run full HMA + NanoMind scan, display results, clean up.
6264
+ */
6265
+ async function checkRawUrl(url, options) {
6266
+ const { mkdtemp, rm, writeFile, readdir } = await Promise.resolve().then(() => __importStar(require('node:fs/promises')));
6267
+ const { tmpdir } = await Promise.resolve().then(() => __importStar(require('node:os')));
6268
+ const { join, basename } = await Promise.resolve().then(() => __importStar(require('node:path')));
6269
+ const { execFile } = await Promise.resolve().then(() => __importStar(require('node:child_process')));
6270
+ const { promisify } = await Promise.resolve().then(() => __importStar(require('node:util')));
6271
+ const execAsync = promisify(execFile);
6272
+ const tempDir = await mkdtemp(join(tmpdir(), 'hma-check-url-'));
6273
+ let scanDir = tempDir;
6274
+ let displayName = url;
6275
+ try {
6276
+ // Git clone for known forge URLs and .git suffix
6277
+ const isGitUrl = url.endsWith('.git')
6278
+ || /^https?:\/\/(gitlab\.com|bitbucket\.org|codeberg\.org|gitea\.com|sr\.ht)\//.test(url);
6279
+ if (isGitUrl) {
6280
+ const repoName = basename(url.replace(/\.git$/, '')) || 'repo';
6281
+ displayName = url.replace(/^https?:\/\//, '').replace(/\.git$/, '');
6282
+ if (!options.json && !globalCiMode) {
6283
+ console.error(`Cloning ${displayName}...`);
6284
+ }
6285
+ await execAsync('git', ['clone', '--depth', '1', '--single-branch', url, join(tempDir, repoName)], { timeout: 120000 });
6286
+ scanDir = join(tempDir, repoName);
6287
+ }
6288
+ else {
6289
+ // HTTP fetch — use HEAD to determine content type
6290
+ if (!options.json && !globalCiMode) {
6291
+ console.error(`Fetching ${url}...`);
6292
+ }
6293
+ const headRes = await fetch(url, { method: 'HEAD', redirect: 'follow' });
6294
+ if (!headRes.ok) {
6295
+ console.error(`Error: HTTP ${headRes.status} fetching "${url}".`);
6296
+ process.exit(1);
6297
+ }
6298
+ const contentType = headRes.headers.get('content-type') || '';
6299
+ const finalUrl = headRes.url;
6300
+ const fileName = basename(new URL(finalUrl).pathname) || 'download';
6301
+ const isArchive = /\.(tar\.gz|tgz|tar\.bz2|tar\.xz|zip)$/i.test(fileName)
6302
+ || contentType.includes('gzip')
6303
+ || contentType.includes('tar')
6304
+ || contentType.includes('zip')
6305
+ || contentType.includes('compressed');
6306
+ const bodyRes = await fetch(finalUrl, { redirect: 'follow' });
6307
+ if (!bodyRes.ok || !bodyRes.body) {
6308
+ console.error(`Error: Failed to download "${url}" (HTTP ${bodyRes.status}).`);
6309
+ process.exit(1);
6310
+ }
6311
+ const buffer = Buffer.from(await bodyRes.arrayBuffer());
6312
+ if (isArchive) {
6313
+ const archivePath = join(tempDir, fileName);
6314
+ await writeFile(archivePath, buffer);
6315
+ const extractDir = join(tempDir, 'extracted');
6316
+ await execAsync('mkdir', ['-p', extractDir]);
6317
+ if (/\.(tar\.gz|tgz)$/i.test(fileName) || contentType.includes('gzip') || contentType.includes('tar')) {
6318
+ await execAsync('tar', ['xzf', archivePath, '-C', extractDir], { timeout: 30000 });
6319
+ }
6320
+ else if (/\.tar\.bz2$/i.test(fileName)) {
6321
+ await execAsync('tar', ['xjf', archivePath, '-C', extractDir], { timeout: 30000 });
6322
+ }
6323
+ else if (/\.tar\.xz$/i.test(fileName)) {
6324
+ await execAsync('tar', ['xJf', archivePath, '-C', extractDir], { timeout: 30000 });
6325
+ }
6326
+ else if (/\.zip$/i.test(fileName)) {
6327
+ await execAsync('unzip', ['-q', archivePath, '-d', extractDir], { timeout: 30000 });
6328
+ }
6329
+ // If extraction produced a single directory, scan that
6330
+ const entries = await readdir(extractDir);
6331
+ if (entries.length === 1) {
6332
+ const { statSync } = await Promise.resolve().then(() => __importStar(require('node:fs')));
6333
+ const innerPath = join(extractDir, entries[0]);
6334
+ if (statSync(innerPath).isDirectory()) {
6335
+ scanDir = innerPath;
6336
+ }
6337
+ else {
6338
+ scanDir = extractDir;
6339
+ }
6340
+ }
6341
+ else {
6342
+ scanDir = extractDir;
6343
+ }
6344
+ displayName = fileName;
6345
+ }
6346
+ else {
6347
+ // Single file: save for scanning
6348
+ await writeFile(join(tempDir, fileName), buffer);
6349
+ scanDir = tempDir;
6350
+ displayName = fileName;
6351
+ }
6352
+ }
6353
+ // Run full HMA scan + NanoMind
6354
+ const scanner = new index_1.HardeningScanner();
6355
+ const result = await scanner.scan({ targetDir: scanDir, autoFix: false });
6356
+ try {
6357
+ const { orchestrateNanoMind } = await Promise.resolve().then(() => __importStar(require('./nanomind-core/orchestrate.js')));
6358
+ const nmResult = await orchestrateNanoMind(scanDir, result.findings, { silent: true });
6359
+ const refiltered = await scanner.reapplyIgnoreFilters(nmResult.mergedFindings, scanDir);
6360
+ const projectType = result.projectType || 'library';
6361
+ result.findings = refiltered.filter((f) => !f.passed && f.file && scanner.findingAppliesTo(f, projectType));
6362
+ result.score = scanner.calculateScore(result.findings.filter((f) => !f.passed && !f.fixed)).score;
6363
+ }
6364
+ catch {
6365
+ // NanoMind unavailable — use base scan results
6366
+ }
6367
+ const failed = result.findings.filter(f => !f.passed);
6368
+ const critical = failed.filter(f => f.severity === 'critical');
6369
+ const high = failed.filter(f => f.severity === 'high');
6370
+ const medium = failed.filter(f => f.severity === 'medium');
6371
+ const low = failed.filter(f => f.severity === 'low');
6372
+ if (options.json) {
6373
+ writeJsonStdout({
6374
+ name: displayName,
6375
+ url,
6376
+ type: 'raw-url',
6377
+ source: 'local-scan',
6378
+ projectType: result.projectType,
6379
+ score: result.score,
6380
+ maxScore: result.maxScore,
6381
+ findings: result.findings,
6382
+ });
6383
+ return;
6384
+ }
6385
+ // Display results
6386
+ const scoreRatio = result.score / result.maxScore;
6387
+ const scoreColor = scoreRatio >= 0.7 ? colors.green : scoreRatio >= 0.4 ? colors.yellow : colors.red;
6388
+ console.log(`\n ${displayName} ${colors.dim}(URL)${RESET()}`);
6389
+ console.log(` Type: ${result.projectType}`);
6390
+ console.log(` Score: ${scoreColor}${result.score}/${result.maxScore}${RESET()}`);
6391
+ console.log(` Findings: ${critical.length} critical, ${high.length} high, ${medium.length} medium, ${low.length} low`);
6392
+ displayCheckFindings(failed, !!options.verbose);
6393
+ // Community contribution (auto-share if opted in, no first-time prompt for URLs)
6394
+ if (process.stdin.isTTY && !globalCiMode) {
6395
+ if (isContributeEnabled()) {
6396
+ flushPendingScans();
6397
+ const ok = await publishToRegistry(displayName, result);
6398
+ if (!ok)
6399
+ queuePendingScan(displayName, result);
6400
+ }
6401
+ }
6402
+ console.log(`\n Full project scan: ${CLI_PREFIX} secure <dir>`);
6403
+ console.log();
6404
+ if (critical.length > 0 || high.length > 0)
6405
+ process.exit(1);
6406
+ }
6407
+ catch (err) {
6408
+ const message = err instanceof Error ? err.message : String(err);
6409
+ if (message.includes('128') || message.includes('not found') || message.includes('Repository not found')) {
6410
+ console.error(`Error: Could not clone repository from "${url}".`);
6411
+ console.error(`\nVerify the URL is accessible and contains a git repository.`);
6412
+ }
6413
+ else if (message.includes('timeout') || message.includes('Timeout')) {
6414
+ console.error(`Error: Fetching "${url}" timed out. The target may be too large.`);
6415
+ console.error(`\nTry downloading manually and scanning the local path:`);
6416
+ console.error(` ${CLI_PREFIX} check ./downloaded-dir/`);
6417
+ }
6418
+ else {
6419
+ console.error(`Error scanning URL: ${message}`);
6420
+ }
6421
+ process.exit(1);
6422
+ }
6423
+ finally {
6424
+ await rm(tempDir, { recursive: true, force: true });
6425
+ }
6426
+ }
6226
6427
  async function checkNpmPackage(name, options) {
6227
6428
  // Step 1: Check registry for existing trust data
6228
6429
  if (!options.offline) {