@harness-engineering/cli 1.8.1 → 1.9.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (32)
  1. package/dist/agents/skills/claude-code/cleanup-dead-code/SKILL.md +3 -3
  2. package/dist/agents/skills/claude-code/harness-autopilot/SKILL.md +13 -1
  3. package/dist/agents/skills/claude-code/harness-brainstorming/SKILL.md +45 -4
  4. package/dist/agents/skills/claude-code/harness-code-review/SKILL.md +36 -15
  5. package/dist/agents/skills/claude-code/harness-codebase-cleanup/SKILL.md +1 -1
  6. package/dist/agents/skills/claude-code/harness-execution/SKILL.md +68 -11
  7. package/dist/agents/skills/claude-code/harness-planning/SKILL.md +41 -3
  8. package/dist/agents/skills/claude-code/harness-pre-commit-review/SKILL.md +28 -3
  9. package/dist/agents/skills/claude-code/harness-release-readiness/SKILL.md +14 -2
  10. package/dist/agents/skills/claude-code/harness-verification/SKILL.md +18 -2
  11. package/dist/agents/skills/gemini-cli/cleanup-dead-code/SKILL.md +3 -3
  12. package/dist/agents/skills/gemini-cli/harness-autopilot/SKILL.md +13 -1
  13. package/dist/agents/skills/gemini-cli/harness-brainstorming/SKILL.md +45 -4
  14. package/dist/agents/skills/gemini-cli/harness-code-review/SKILL.md +36 -15
  15. package/dist/agents/skills/gemini-cli/harness-codebase-cleanup/SKILL.md +1 -1
  16. package/dist/agents/skills/gemini-cli/harness-execution/SKILL.md +68 -11
  17. package/dist/agents/skills/gemini-cli/harness-planning/SKILL.md +41 -3
  18. package/dist/agents/skills/gemini-cli/harness-pre-commit-review/SKILL.md +28 -3
  19. package/dist/agents/skills/gemini-cli/harness-release-readiness/SKILL.md +14 -2
  20. package/dist/agents/skills/gemini-cli/harness-verification/SKILL.md +18 -2
  21. package/dist/bin/harness.js +7 -7
  22. package/dist/{chunk-RT2LYQHF.js → chunk-6JIT7CEM.js} +1 -1
  23. package/dist/{chunk-E2RTDBMG.js → chunk-CGSHUJES.js} +138 -86
  24. package/dist/{chunk-KJANDVVC.js → chunk-RTPHUDZS.js} +28 -17
  25. package/dist/{chunk-ACMDUQJG.js → chunk-ULSRSP53.js} +2 -1
  26. package/dist/{create-skill-NZDLMMR6.js → create-skill-UZOHMXRU.js} +1 -1
  27. package/dist/{dist-CCM3L3UE.js → dist-C5PYIQPF.js} +1 -1
  28. package/dist/{dist-K6KTTN3I.js → dist-I7DB5VKB.js} +237 -0
  29. package/dist/index.js +4 -4
  30. package/dist/validate-cross-check-VG573VZO.js +7 -0
  31. package/package.json +6 -5
  32. package/dist/validate-cross-check-ZGKFQY57.js +0 -7
@@ -4969,6 +4969,15 @@ var FAILURES_FILE = "failures.md";
4969
4969
  var HANDOFF_FILE = "handoff.json";
4970
4970
  var GATE_CONFIG_FILE = "gate.json";
4971
4971
  var INDEX_FILE2 = "index.json";
4972
+ var MAX_CACHE_ENTRIES = 8;
4973
+ var learningsCacheMap = /* @__PURE__ */ new Map();
4974
+ var failuresCacheMap = /* @__PURE__ */ new Map();
4975
+ function evictIfNeeded(map) {
4976
+ if (map.size > MAX_CACHE_ENTRIES) {
4977
+ const oldest = map.keys().next().value;
4978
+ if (oldest !== void 0) map.delete(oldest);
4979
+ }
4980
+ }
4972
4981
  async function getStateDir(projectPath, stream) {
4973
4982
  const streamsIndexPath = path3.join(projectPath, HARNESS_DIR2, "streams", INDEX_FILE2);
4974
4983
  const hasStreams = fs4.existsSync(streamsIndexPath);
@@ -5066,25 +5075,35 @@ async function loadRelevantLearnings(projectPath, skillName, stream) {
5066
5075
  if (!fs4.existsSync(learningsPath)) {
5067
5076
  return Ok([]);
5068
5077
  }
5069
- const content = fs4.readFileSync(learningsPath, "utf-8");
5070
- const lines = content.split("\n");
5071
- const entries = [];
5072
- let currentBlock = [];
5073
- for (const line of lines) {
5074
- if (line.startsWith("# ")) continue;
5075
- const isDatedBullet = /^- \*\*\d{4}-\d{2}-\d{2}/.test(line);
5076
- const isHeading = /^## \d{4}-\d{2}-\d{2}/.test(line);
5077
- if (isDatedBullet || isHeading) {
5078
- if (currentBlock.length > 0) {
5079
- entries.push(currentBlock.join("\n"));
5078
+ const stats = fs4.statSync(learningsPath);
5079
+ const cacheKey = learningsPath;
5080
+ const cached = learningsCacheMap.get(cacheKey);
5081
+ let entries;
5082
+ if (cached && cached.mtimeMs === stats.mtimeMs) {
5083
+ entries = cached.entries;
5084
+ } else {
5085
+ const content = fs4.readFileSync(learningsPath, "utf-8");
5086
+ const lines = content.split("\n");
5087
+ entries = [];
5088
+ let currentBlock = [];
5089
+ for (const line of lines) {
5090
+ if (line.startsWith("# ")) continue;
5091
+ const isDatedBullet = /^- \*\*\d{4}-\d{2}-\d{2}/.test(line);
5092
+ const isHeading = /^## \d{4}-\d{2}-\d{2}/.test(line);
5093
+ if (isDatedBullet || isHeading) {
5094
+ if (currentBlock.length > 0) {
5095
+ entries.push(currentBlock.join("\n"));
5096
+ }
5097
+ currentBlock = [line];
5098
+ } else if (line.trim() !== "" && currentBlock.length > 0) {
5099
+ currentBlock.push(line);
5080
5100
  }
5081
- currentBlock = [line];
5082
- } else if (line.trim() !== "" && currentBlock.length > 0) {
5083
- currentBlock.push(line);
5084
5101
  }
5085
- }
5086
- if (currentBlock.length > 0) {
5087
- entries.push(currentBlock.join("\n"));
5102
+ if (currentBlock.length > 0) {
5103
+ entries.push(currentBlock.join("\n"));
5104
+ }
5105
+ learningsCacheMap.set(cacheKey, { mtimeMs: stats.mtimeMs, entries });
5106
+ evictIfNeeded(learningsCacheMap);
5088
5107
  }
5089
5108
  if (!skillName) {
5090
5109
  return Ok(entries);
@@ -5135,6 +5154,12 @@ async function loadFailures(projectPath, stream) {
5135
5154
  if (!fs4.existsSync(failuresPath)) {
5136
5155
  return Ok([]);
5137
5156
  }
5157
+ const stats = fs4.statSync(failuresPath);
5158
+ const cacheKey = failuresPath;
5159
+ const cached = failuresCacheMap.get(cacheKey);
5160
+ if (cached && cached.mtimeMs === stats.mtimeMs) {
5161
+ return Ok(cached.entries);
5162
+ }
5138
5163
  const content = fs4.readFileSync(failuresPath, "utf-8");
5139
5164
  const entries = [];
5140
5165
  for (const line of content.split("\n")) {
@@ -5148,6 +5173,8 @@ async function loadFailures(projectPath, stream) {
5148
5173
  });
5149
5174
  }
5150
5175
  }
5176
+ failuresCacheMap.set(cacheKey, { mtimeMs: stats.mtimeMs, entries });
5177
+ evictIfNeeded(failuresCacheMap);
5151
5178
  return Ok(entries);
5152
5179
  } catch (error) {
5153
5180
  return Err(
@@ -6199,14 +6226,22 @@ async function runCIChecks(input) {
6199
6226
  const { projectRoot, config, skip = [], failOn = "error" } = input;
6200
6227
  try {
6201
6228
  const checks = [];
6202
- for (const name of ALL_CHECKS) {
6203
- if (skip.includes(name)) {
6204
- checks.push({ name, status: "skip", issues: [], durationMs: 0 });
6205
- } else {
6206
- const result = await runSingleCheck(name, projectRoot, config);
6207
- checks.push(result);
6208
- }
6229
+ const skippedSet = new Set(skip);
6230
+ if (skippedSet.has("validate")) {
6231
+ checks.push({ name: "validate", status: "skip", issues: [], durationMs: 0 });
6232
+ } else {
6233
+ checks.push(await runSingleCheck("validate", projectRoot, config));
6209
6234
  }
6235
+ const remainingChecks = ALL_CHECKS.slice(1);
6236
+ const phase2Results = await Promise.all(
6237
+ remainingChecks.map(async (name) => {
6238
+ if (skippedSet.has(name)) {
6239
+ return { name, status: "skip", issues: [], durationMs: 0 };
6240
+ }
6241
+ return runSingleCheck(name, projectRoot, config);
6242
+ })
6243
+ );
6244
+ checks.push(...phase2Results);
6210
6245
  const summary = buildSummary(checks);
6211
6246
  const exitCode = determineExitCode(summary, failOn);
6212
6247
  const report = {
@@ -6328,76 +6363,93 @@ async function runMechanicalChecks(options) {
6328
6363
  });
6329
6364
  }
6330
6365
  }
6366
+ const parallelChecks = [];
6331
6367
  if (!skip.includes("check-docs")) {
6332
- try {
6333
- const docsDir = path6.join(projectRoot, config.docsDir ?? "docs");
6334
- const result = await checkDocCoverage("project", { docsDir });
6335
- if (!result.ok) {
6336
- statuses["check-docs"] = "warn";
6337
- findings.push({
6338
- tool: "check-docs",
6339
- file: docsDir,
6340
- message: result.error.message,
6341
- severity: "warning"
6342
- });
6343
- } else if (result.value.gaps && result.value.gaps.length > 0) {
6344
- statuses["check-docs"] = "warn";
6345
- for (const gap of result.value.gaps) {
6346
- findings.push({
6368
+ parallelChecks.push(
6369
+ (async () => {
6370
+ const localFindings = [];
6371
+ try {
6372
+ const docsDir = path6.join(projectRoot, config.docsDir ?? "docs");
6373
+ const result = await checkDocCoverage("project", { docsDir });
6374
+ if (!result.ok) {
6375
+ statuses["check-docs"] = "warn";
6376
+ localFindings.push({
6377
+ tool: "check-docs",
6378
+ file: docsDir,
6379
+ message: result.error.message,
6380
+ severity: "warning"
6381
+ });
6382
+ } else if (result.value.gaps && result.value.gaps.length > 0) {
6383
+ statuses["check-docs"] = "warn";
6384
+ for (const gap of result.value.gaps) {
6385
+ localFindings.push({
6386
+ tool: "check-docs",
6387
+ file: gap.file,
6388
+ message: `Undocumented: ${gap.file} (suggested: ${gap.suggestedSection})`,
6389
+ severity: "warning"
6390
+ });
6391
+ }
6392
+ } else {
6393
+ statuses["check-docs"] = "pass";
6394
+ }
6395
+ } catch (err) {
6396
+ statuses["check-docs"] = "warn";
6397
+ localFindings.push({
6347
6398
  tool: "check-docs",
6348
- file: gap.file,
6349
- message: `Undocumented: ${gap.file} (suggested: ${gap.suggestedSection})`,
6399
+ file: path6.join(projectRoot, "docs"),
6400
+ message: err instanceof Error ? err.message : String(err),
6350
6401
  severity: "warning"
6351
6402
  });
6352
6403
  }
6353
- } else {
6354
- statuses["check-docs"] = "pass";
6355
- }
6356
- } catch (err) {
6357
- statuses["check-docs"] = "warn";
6358
- findings.push({
6359
- tool: "check-docs",
6360
- file: path6.join(projectRoot, "docs"),
6361
- message: err instanceof Error ? err.message : String(err),
6362
- severity: "warning"
6363
- });
6364
- }
6404
+ return localFindings;
6405
+ })()
6406
+ );
6365
6407
  }
6366
6408
  if (!skip.includes("security-scan")) {
6367
- try {
6368
- const securityConfig = parseSecurityConfig(config.security);
6369
- if (!securityConfig.enabled) {
6370
- statuses["security-scan"] = "skip";
6371
- } else {
6372
- const scanner = new SecurityScanner(securityConfig);
6373
- scanner.configureForProject(projectRoot);
6374
- const filesToScan = changedFiles ?? [];
6375
- const scanResult = await scanner.scanFiles(filesToScan);
6376
- if (scanResult.findings.length > 0) {
6377
- statuses["security-scan"] = "warn";
6378
- for (const f of scanResult.findings) {
6379
- findings.push({
6380
- tool: "security-scan",
6381
- file: f.file,
6382
- line: f.line,
6383
- ruleId: f.ruleId,
6384
- message: f.message,
6385
- severity: f.severity === "info" ? "warning" : f.severity
6386
- });
6409
+ parallelChecks.push(
6410
+ (async () => {
6411
+ const localFindings = [];
6412
+ try {
6413
+ const securityConfig = parseSecurityConfig(config.security);
6414
+ if (!securityConfig.enabled) {
6415
+ statuses["security-scan"] = "skip";
6416
+ } else {
6417
+ const scanner = new SecurityScanner(securityConfig);
6418
+ scanner.configureForProject(projectRoot);
6419
+ const filesToScan = changedFiles ?? [];
6420
+ const scanResult = await scanner.scanFiles(filesToScan);
6421
+ if (scanResult.findings.length > 0) {
6422
+ statuses["security-scan"] = "warn";
6423
+ for (const f of scanResult.findings) {
6424
+ localFindings.push({
6425
+ tool: "security-scan",
6426
+ file: f.file,
6427
+ line: f.line,
6428
+ ruleId: f.ruleId,
6429
+ message: f.message,
6430
+ severity: f.severity === "info" ? "warning" : f.severity
6431
+ });
6432
+ }
6433
+ } else {
6434
+ statuses["security-scan"] = "pass";
6435
+ }
6387
6436
  }
6388
- } else {
6389
- statuses["security-scan"] = "pass";
6437
+ } catch (err) {
6438
+ statuses["security-scan"] = "warn";
6439
+ localFindings.push({
6440
+ tool: "security-scan",
6441
+ file: projectRoot,
6442
+ message: err instanceof Error ? err.message : String(err),
6443
+ severity: "warning"
6444
+ });
6390
6445
  }
6391
- }
6392
- } catch (err) {
6393
- statuses["security-scan"] = "warn";
6394
- findings.push({
6395
- tool: "security-scan",
6396
- file: projectRoot,
6397
- message: err instanceof Error ? err.message : String(err),
6398
- severity: "warning"
6399
- });
6400
- }
6446
+ return localFindings;
6447
+ })()
6448
+ );
6449
+ }
6450
+ const parallelResults = await Promise.all(parallelChecks);
6451
+ for (const result of parallelResults) {
6452
+ findings.push(...result);
6401
6453
  }
6402
6454
  const hasErrors = findings.some((f) => f.severity === "error");
6403
6455
  const stopPipeline = statuses.validate === "fail" || statuses["check-deps"] === "fail";
@@ -8200,7 +8252,7 @@ function getUpdateNotification(currentVersion) {
8200
8252
  return `Update available: v${currentVersion} -> v${state.latestVersion}
8201
8253
  Run "harness update" to upgrade.`;
8202
8254
  }
8203
- var VERSION = "0.8.0";
8255
+ var VERSION = "1.8.2";
8204
8256
 
8205
8257
  export {
8206
8258
  Ok,
@@ -6,7 +6,6 @@ import {
6
6
  Ok,
7
7
  SecurityScanner,
8
8
  TypeScriptParser,
9
- VERSION,
10
9
  appendLearning,
11
10
  applyFixes,
12
11
  archiveStream,
@@ -32,7 +31,7 @@ import {
32
31
  validateAgentsMap,
33
32
  validateDependencies,
34
33
  validateKnowledgeMap
35
- } from "./chunk-E2RTDBMG.js";
34
+ } from "./chunk-CGSHUJES.js";
36
35
  import {
37
36
  CLIError,
38
37
  ExitCode,
@@ -40,11 +39,22 @@ import {
40
39
  createCreateSkillCommand,
41
40
  handleError,
42
41
  logger
43
- } from "./chunk-ACMDUQJG.js";
42
+ } from "./chunk-ULSRSP53.js";
44
43
 
45
44
  // src/index.ts
46
45
  import { Command as Command43 } from "commander";
47
46
 
47
+ // src/version.ts
48
+ import { createRequire } from "module";
49
+ var require_ = createRequire(import.meta.url);
50
+ var resolved;
51
+ try {
52
+ resolved = require_("../package.json").version ?? "0.0.0";
53
+ } catch {
54
+ resolved = "0.0.0";
55
+ }
56
+ var CLI_VERSION = resolved;
57
+
48
58
  // src/commands/validate.ts
49
59
  import { Command } from "commander";
50
60
  import * as path2 from "path";
@@ -359,7 +369,7 @@ function createValidateCommand() {
359
369
  process.exit(result.error.exitCode);
360
370
  }
361
371
  if (opts.crossCheck) {
362
- const { runCrossCheck: runCrossCheck2 } = await import("./validate-cross-check-ZGKFQY57.js");
372
+ const { runCrossCheck: runCrossCheck2 } = await import("./validate-cross-check-VG573VZO.js");
363
373
  const cwd = process.cwd();
364
374
  const specsDir = path2.join(cwd, "docs", "specs");
365
375
  const plansDir = path2.join(cwd, "docs", "plans");
@@ -725,7 +735,7 @@ function createPerfCommand() {
725
735
  perf.command("bench [glob]").description("Run benchmarks via vitest bench").action(async (glob2, _opts, cmd) => {
726
736
  const globalOpts = cmd.optsWithGlobals();
727
737
  const cwd = process.cwd();
728
- const { BenchmarkRunner } = await import("./dist-CCM3L3UE.js");
738
+ const { BenchmarkRunner } = await import("./dist-C5PYIQPF.js");
729
739
  const runner = new BenchmarkRunner();
730
740
  const benchFiles = runner.discover(cwd, glob2);
731
741
  if (benchFiles.length === 0) {
@@ -794,7 +804,7 @@ Results (${result.results.length} benchmarks):`);
794
804
  baselines.command("update").description("Update baselines from latest benchmark run").action(async (_opts, cmd) => {
795
805
  const globalOpts = cmd.optsWithGlobals();
796
806
  const cwd = process.cwd();
797
- const { BenchmarkRunner } = await import("./dist-CCM3L3UE.js");
807
+ const { BenchmarkRunner } = await import("./dist-C5PYIQPF.js");
798
808
  const runner = new BenchmarkRunner();
799
809
  const manager = new BaselineManager(cwd);
800
810
  logger.info("Running benchmarks to update baselines...");
@@ -822,7 +832,7 @@ Results (${result.results.length} benchmarks):`);
822
832
  perf.command("report").description("Full performance report with metrics, trends, and hotspots").action(async (_opts, cmd) => {
823
833
  const globalOpts = cmd.optsWithGlobals();
824
834
  const cwd = process.cwd();
825
- const { EntropyAnalyzer: EntropyAnalyzer2 } = await import("./dist-CCM3L3UE.js");
835
+ const { EntropyAnalyzer: EntropyAnalyzer2 } = await import("./dist-C5PYIQPF.js");
826
836
  const analyzer = new EntropyAnalyzer2({
827
837
  rootDir: path6.resolve(cwd),
828
838
  analyze: { complexity: true, coupling: true }
@@ -1095,9 +1105,9 @@ var TemplateEngine = class {
1095
1105
  files = this.mergeFileLists(files, frameworkFiles);
1096
1106
  }
1097
1107
  files = files.filter((f) => f.relativePath !== "template.json");
1098
- const resolved = { metadata, files };
1099
- if (overlayMetadata !== void 0) resolved.overlayMetadata = overlayMetadata;
1100
- return Ok(resolved);
1108
+ const resolved2 = { metadata, files };
1109
+ if (overlayMetadata !== void 0) resolved2.overlayMetadata = overlayMetadata;
1110
+ return Ok(resolved2);
1101
1111
  }
1102
1112
  render(template, context) {
1103
1113
  const rendered = [];
@@ -2514,7 +2524,7 @@ async function runAdd(componentType, name, options) {
2514
2524
  break;
2515
2525
  }
2516
2526
  case "skill": {
2517
- const { generateSkillFiles: generateSkillFiles2 } = await import("./create-skill-NZDLMMR6.js");
2527
+ const { generateSkillFiles: generateSkillFiles2 } = await import("./create-skill-UZOHMXRU.js");
2518
2528
  generateSkillFiles2({
2519
2529
  name,
2520
2530
  description: `${name} skill`,
@@ -5084,7 +5094,7 @@ function createGenerateCommand3() {
5084
5094
  import { Command as Command39 } from "commander";
5085
5095
  import * as path37 from "path";
5086
5096
  async function runScan(projectPath) {
5087
- const { GraphStore, CodeIngestor, TopologicalLinker, KnowledgeIngestor, GitIngestor } = await import("./dist-K6KTTN3I.js");
5097
+ const { GraphStore, CodeIngestor, TopologicalLinker, KnowledgeIngestor, GitIngestor } = await import("./dist-I7DB5VKB.js");
5088
5098
  const store = new GraphStore();
5089
5099
  const start = Date.now();
5090
5100
  await new CodeIngestor(store).ingest(projectPath);
@@ -5165,7 +5175,7 @@ async function runIngest(projectPath, source, opts) {
5165
5175
  SyncManager,
5166
5176
  JiraConnector,
5167
5177
  SlackConnector
5168
- } = await import("./dist-K6KTTN3I.js");
5178
+ } = await import("./dist-I7DB5VKB.js");
5169
5179
  const graphDir = path38.join(projectPath, ".harness", "graph");
5170
5180
  const store = new GraphStore();
5171
5181
  await store.load(graphDir);
@@ -5258,7 +5268,7 @@ function createIngestCommand() {
5258
5268
  import { Command as Command41 } from "commander";
5259
5269
  import * as path39 from "path";
5260
5270
  async function runQuery(projectPath, rootNodeId, opts) {
5261
- const { GraphStore, ContextQL } = await import("./dist-K6KTTN3I.js");
5271
+ const { GraphStore, ContextQL } = await import("./dist-I7DB5VKB.js");
5262
5272
  const store = new GraphStore();
5263
5273
  const graphDir = path39.join(projectPath, ".harness", "graph");
5264
5274
  const loaded = await store.load(graphDir);
@@ -5307,7 +5317,7 @@ import { Command as Command42 } from "commander";
5307
5317
  // src/commands/graph/status.ts
5308
5318
  import * as path40 from "path";
5309
5319
  async function runGraphStatus(projectPath) {
5310
- const { GraphStore } = await import("./dist-K6KTTN3I.js");
5320
+ const { GraphStore } = await import("./dist-I7DB5VKB.js");
5311
5321
  const graphDir = path40.join(projectPath, ".harness", "graph");
5312
5322
  const store = new GraphStore();
5313
5323
  const loaded = await store.load(graphDir);
@@ -5347,7 +5357,7 @@ async function runGraphStatus(projectPath) {
5347
5357
  // src/commands/graph/export.ts
5348
5358
  import * as path41 from "path";
5349
5359
  async function runGraphExport(projectPath, format) {
5350
- const { GraphStore } = await import("./dist-K6KTTN3I.js");
5360
+ const { GraphStore } = await import("./dist-I7DB5VKB.js");
5351
5361
  const graphDir = path41.join(projectPath, ".harness", "graph");
5352
5362
  const store = new GraphStore();
5353
5363
  const loaded = await store.load(graphDir);
@@ -5425,7 +5435,7 @@ function createGraphCommand() {
5425
5435
  // src/index.ts
5426
5436
  function createProgram() {
5427
5437
  const program = new Command43();
5428
- program.name("harness").description("CLI for Harness Engineering toolkit").version(VERSION).option("-c, --config <path>", "Path to config file").option("--json", "Output as JSON").option("--verbose", "Verbose output").option("--quiet", "Minimal output");
5438
+ program.name("harness").description("CLI for Harness Engineering toolkit").version(CLI_VERSION).option("-c, --config <path>", "Path to config file").option("--json", "Output as JSON").option("--verbose", "Verbose output").option("--quiet", "Minimal output");
5429
5439
  program.addCommand(createValidateCommand());
5430
5440
  program.addCommand(createCheckDepsCommand());
5431
5441
  program.addCommand(createCheckDocsCommand());
@@ -5457,6 +5467,7 @@ function createProgram() {
5457
5467
  }
5458
5468
 
5459
5469
  export {
5470
+ CLI_VERSION,
5460
5471
  findConfigFile,
5461
5472
  loadConfig,
5462
5473
  resolveConfig,
@@ -39,7 +39,8 @@ var ALLOWED_TRIGGERS = [
39
39
  "on_project_init",
40
40
  "on_review",
41
41
  "on_milestone",
42
- "on_task_complete"
42
+ "on_task_complete",
43
+ "on_doc_check"
43
44
  ];
44
45
  var ALLOWED_PLATFORMS = ["claude-code", "gemini-cli"];
45
46
  var ALLOWED_COGNITIVE_MODES = [
@@ -1,7 +1,7 @@
1
1
  import {
2
2
  createCreateSkillCommand,
3
3
  generateSkillFiles
4
- } from "./chunk-ACMDUQJG.js";
4
+ } from "./chunk-ULSRSP53.js";
5
5
  export {
6
6
  createCreateSkillCommand,
7
7
  generateSkillFiles
@@ -165,7 +165,7 @@ import {
165
165
  validateKnowledgeMap,
166
166
  validatePatternConfig,
167
167
  xssRules
168
- } from "./chunk-E2RTDBMG.js";
168
+ } from "./chunk-CGSHUJES.js";
169
169
  export {
170
170
  AGENT_DESCRIPTORS,
171
171
  ARCHITECTURE_DESCRIPTOR,