@harness-engineering/core 0.14.0 → 0.15.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.mjs CHANGED
@@ -84,15 +84,15 @@ function validateConfig(data, schema) {
84
84
  let message = "Configuration validation failed";
85
85
  const suggestions = [];
86
86
  if (firstError) {
87
- const path22 = firstError.path.join(".");
88
- const pathDisplay = path22 ? ` at "${path22}"` : "";
87
+ const path23 = firstError.path.join(".");
88
+ const pathDisplay = path23 ? ` at "${path23}"` : "";
89
89
  if (firstError.code === "invalid_type") {
90
90
  const received = firstError.received;
91
91
  const expected = firstError.expected;
92
92
  if (received === "undefined") {
93
93
  code = "MISSING_FIELD";
94
94
  message = `Missing required field${pathDisplay}: ${firstError.message}`;
95
- suggestions.push(`Field "${path22}" is required and must be of type "${expected}"`);
95
+ suggestions.push(`Field "${path23}" is required and must be of type "${expected}"`);
96
96
  } else {
97
97
  code = "INVALID_TYPE";
98
98
  message = `Invalid type${pathDisplay}: ${firstError.message}`;
@@ -308,27 +308,27 @@ function extractSections(content) {
308
308
  }
309
309
  return sections.map((section) => buildAgentMapSection(section, lines));
310
310
  }
311
- function isExternalLink(path22) {
312
- return path22.startsWith("http://") || path22.startsWith("https://") || path22.startsWith("#") || path22.startsWith("mailto:");
311
+ function isExternalLink(path23) {
312
+ return path23.startsWith("http://") || path23.startsWith("https://") || path23.startsWith("#") || path23.startsWith("mailto:");
313
313
  }
314
314
  function resolveLinkPath(linkPath, baseDir) {
315
315
  return linkPath.startsWith(".") ? join(baseDir, linkPath) : linkPath;
316
316
  }
317
- async function validateAgentsMap(path22 = "./AGENTS.md") {
318
- const contentResult = await readFileContent(path22);
317
+ async function validateAgentsMap(path23 = "./AGENTS.md") {
318
+ const contentResult = await readFileContent(path23);
319
319
  if (!contentResult.ok) {
320
320
  return Err(
321
321
  createError(
322
322
  "PARSE_ERROR",
323
323
  `Failed to read AGENTS.md: ${contentResult.error.message}`,
324
- { path: path22 },
324
+ { path: path23 },
325
325
  ["Ensure the file exists", "Check file permissions"]
326
326
  )
327
327
  );
328
328
  }
329
329
  const content = contentResult.value;
330
330
  const sections = extractSections(content);
331
- const baseDir = dirname(path22);
331
+ const baseDir = dirname(path23);
332
332
  const sectionTitles = sections.map((s) => s.title);
333
333
  const missingSections = REQUIRED_SECTIONS.filter(
334
334
  (required) => !sectionTitles.some((title) => title.toLowerCase().includes(required.toLowerCase()))
@@ -469,8 +469,8 @@ async function checkDocCoverage(domain, options = {}) {
469
469
 
470
470
  // src/context/knowledge-map.ts
471
471
  import { join as join2, basename as basename2 } from "path";
472
- function suggestFix(path22, existingFiles) {
473
- const targetName = basename2(path22).toLowerCase();
472
+ function suggestFix(path23, existingFiles) {
473
+ const targetName = basename2(path23).toLowerCase();
474
474
  const similar = existingFiles.find((file) => {
475
475
  const fileName = basename2(file).toLowerCase();
476
476
  return fileName.includes(targetName) || targetName.includes(fileName);
@@ -478,7 +478,7 @@ function suggestFix(path22, existingFiles) {
478
478
  if (similar) {
479
479
  return `Did you mean "${similar}"?`;
480
480
  }
481
- return `Create the file "${path22}" or remove the link`;
481
+ return `Create the file "${path23}" or remove the link`;
482
482
  }
483
483
  async function validateKnowledgeMap(rootDir = process.cwd()) {
484
484
  const agentsPath = join2(rootDir, "AGENTS.md");
@@ -830,8 +830,8 @@ function createBoundaryValidator(schema, name) {
830
830
  return Ok(result.data);
831
831
  }
832
832
  const suggestions = result.error.issues.map((issue) => {
833
- const path22 = issue.path.join(".");
834
- return path22 ? `${path22}: ${issue.message}` : issue.message;
833
+ const path23 = issue.path.join(".");
834
+ return path23 ? `${path23}: ${issue.message}` : issue.message;
835
835
  });
836
836
  return Err(
837
837
  createError(
@@ -1463,11 +1463,11 @@ function processExportListSpecifiers(exportDecl, exports) {
1463
1463
  var TypeScriptParser = class {
1464
1464
  name = "typescript";
1465
1465
  extensions = [".ts", ".tsx", ".mts", ".cts"];
1466
- async parseFile(path22) {
1467
- const contentResult = await readFileContent(path22);
1466
+ async parseFile(path23) {
1467
+ const contentResult = await readFileContent(path23);
1468
1468
  if (!contentResult.ok) {
1469
1469
  return Err(
1470
- createParseError("NOT_FOUND", `File not found: ${path22}`, { path: path22 }, [
1470
+ createParseError("NOT_FOUND", `File not found: ${path23}`, { path: path23 }, [
1471
1471
  "Check that the file exists",
1472
1472
  "Verify the path is correct"
1473
1473
  ])
@@ -1477,7 +1477,7 @@ var TypeScriptParser = class {
1477
1477
  const ast = parse(contentResult.value, {
1478
1478
  loc: true,
1479
1479
  range: true,
1480
- jsx: path22.endsWith(".tsx"),
1480
+ jsx: path23.endsWith(".tsx"),
1481
1481
  errorOnUnknownASTType: false
1482
1482
  });
1483
1483
  return Ok({
@@ -1488,7 +1488,7 @@ var TypeScriptParser = class {
1488
1488
  } catch (e) {
1489
1489
  const error = e;
1490
1490
  return Err(
1491
- createParseError("SYNTAX_ERROR", `Failed to parse ${path22}: ${error.message}`, { path: path22 }, [
1491
+ createParseError("SYNTAX_ERROR", `Failed to parse ${path23}: ${error.message}`, { path: path23 }, [
1492
1492
  "Check for syntax errors in the file",
1493
1493
  "Ensure valid TypeScript syntax"
1494
1494
  ])
@@ -1673,22 +1673,22 @@ function extractInlineRefs(content) {
1673
1673
  }
1674
1674
  return refs;
1675
1675
  }
1676
- async function parseDocumentationFile(path22) {
1677
- const contentResult = await readFileContent(path22);
1676
+ async function parseDocumentationFile(path23) {
1677
+ const contentResult = await readFileContent(path23);
1678
1678
  if (!contentResult.ok) {
1679
1679
  return Err(
1680
1680
  createEntropyError(
1681
1681
  "PARSE_ERROR",
1682
- `Failed to read documentation file: ${path22}`,
1683
- { file: path22 },
1682
+ `Failed to read documentation file: ${path23}`,
1683
+ { file: path23 },
1684
1684
  ["Check that the file exists"]
1685
1685
  )
1686
1686
  );
1687
1687
  }
1688
1688
  const content = contentResult.value;
1689
- const type = path22.endsWith(".md") ? "markdown" : "text";
1689
+ const type = path23.endsWith(".md") ? "markdown" : "text";
1690
1690
  return Ok({
1691
- path: path22,
1691
+ path: path23,
1692
1692
  type,
1693
1693
  content,
1694
1694
  codeBlocks: extractCodeBlocks(content),
@@ -4820,6 +4820,8 @@ var SESSION_INDEX_FILE = "index.md";
4820
4820
  var SUMMARY_FILE = "summary.md";
4821
4821
  var SESSION_STATE_FILE = "session-state.json";
4822
4822
  var ARCHIVE_DIR = "archive";
4823
+ var CONTENT_HASHES_FILE = "content-hashes.json";
4824
+ var EVENTS_FILE = "events.jsonl";
4823
4825
 
4824
4826
  // src/state/stream-resolver.ts
4825
4827
  var STREAMS_DIR = "streams";
@@ -5162,6 +5164,85 @@ async function saveState(projectPath, state, stream, session) {
5162
5164
  // src/state/learnings.ts
5163
5165
  import * as fs9 from "fs";
5164
5166
  import * as path6 from "path";
5167
+ import * as crypto from "crypto";
5168
+ function parseFrontmatter(line) {
5169
+ const match = line.match(/^<!--\s+hash:([a-f0-9]+)(?:\s+tags:([^\s]+))?\s+-->/);
5170
+ if (!match) return null;
5171
+ const hash = match[1];
5172
+ const tags = match[2] ? match[2].split(",").filter(Boolean) : [];
5173
+ return { hash, tags };
5174
+ }
5175
+ function computeEntryHash(text) {
5176
+ return crypto.createHash("sha256").update(text).digest("hex").slice(0, 8);
5177
+ }
5178
+ function normalizeLearningContent(text) {
5179
+ let normalized = text;
5180
+ normalized = normalized.replace(/\d{4}-\d{2}-\d{2}/g, "");
5181
+ normalized = normalized.replace(/\[skill:[^\]]*\]/g, "");
5182
+ normalized = normalized.replace(/\[outcome:[^\]]*\]/g, "");
5183
+ normalized = normalized.replace(/^[\s]*[-*]\s+/gm, "");
5184
+ normalized = normalized.replace(/\*\*/g, "");
5185
+ normalized = normalized.replace(/:\s*/g, " ");
5186
+ normalized = normalized.toLowerCase();
5187
+ normalized = normalized.replace(/\s+/g, " ").trim();
5188
+ return normalized;
5189
+ }
5190
+ function computeContentHash(text) {
5191
+ return crypto.createHash("sha256").update(text).digest("hex").slice(0, 16);
5192
+ }
5193
+ function loadContentHashes(stateDir) {
5194
+ const hashesPath = path6.join(stateDir, CONTENT_HASHES_FILE);
5195
+ if (!fs9.existsSync(hashesPath)) return {};
5196
+ try {
5197
+ const raw = fs9.readFileSync(hashesPath, "utf-8");
5198
+ const parsed = JSON.parse(raw);
5199
+ if (typeof parsed !== "object" || parsed === null || Array.isArray(parsed)) return {};
5200
+ return parsed;
5201
+ } catch {
5202
+ return {};
5203
+ }
5204
+ }
5205
+ function saveContentHashes(stateDir, index) {
5206
+ const hashesPath = path6.join(stateDir, CONTENT_HASHES_FILE);
5207
+ fs9.writeFileSync(hashesPath, JSON.stringify(index, null, 2) + "\n");
5208
+ }
5209
+ function rebuildContentHashes(stateDir) {
5210
+ const learningsPath = path6.join(stateDir, LEARNINGS_FILE);
5211
+ if (!fs9.existsSync(learningsPath)) return {};
5212
+ const content = fs9.readFileSync(learningsPath, "utf-8");
5213
+ const lines = content.split("\n");
5214
+ const index = {};
5215
+ for (let i = 0; i < lines.length; i++) {
5216
+ const line = lines[i];
5217
+ const isDatedBullet = /^- \*\*\d{4}-\d{2}-\d{2}/.test(line);
5218
+ if (isDatedBullet) {
5219
+ const learningMatch = line.match(/:\*\*\s*(.+)$/);
5220
+ if (learningMatch?.[1]) {
5221
+ const normalized = normalizeLearningContent(learningMatch[1]);
5222
+ const hash = computeContentHash(normalized);
5223
+ const dateMatch = line.match(/(\d{4}-\d{2}-\d{2})/);
5224
+ index[hash] = { date: dateMatch?.[1] ?? "", line: i + 1 };
5225
+ }
5226
+ }
5227
+ }
5228
+ saveContentHashes(stateDir, index);
5229
+ return index;
5230
+ }
5231
+ function extractIndexEntry(entry) {
5232
+ const lines = entry.split("\n");
5233
+ const summary = lines[0] ?? entry;
5234
+ const tags = [];
5235
+ const skillMatch = entry.match(/\[skill:([^\]]+)\]/);
5236
+ if (skillMatch?.[1]) tags.push(skillMatch[1]);
5237
+ const outcomeMatch = entry.match(/\[outcome:([^\]]+)\]/);
5238
+ if (outcomeMatch?.[1]) tags.push(outcomeMatch[1]);
5239
+ return {
5240
+ hash: computeEntryHash(entry),
5241
+ tags,
5242
+ summary,
5243
+ fullText: entry
5244
+ };
5245
+ }
5165
5246
  var learningsCacheMap = /* @__PURE__ */ new Map();
5166
5247
  function clearLearningsCache() {
5167
5248
  learningsCacheMap.clear();
@@ -5173,27 +5254,55 @@ async function appendLearning(projectPath, learning, skillName, outcome, stream,
5173
5254
  const stateDir = dirResult.value;
5174
5255
  const learningsPath = path6.join(stateDir, LEARNINGS_FILE);
5175
5256
  fs9.mkdirSync(stateDir, { recursive: true });
5257
+ const normalizedContent = normalizeLearningContent(learning);
5258
+ const contentHash = computeContentHash(normalizedContent);
5259
+ const hashesPath = path6.join(stateDir, CONTENT_HASHES_FILE);
5260
+ let contentHashes;
5261
+ if (fs9.existsSync(hashesPath)) {
5262
+ contentHashes = loadContentHashes(stateDir);
5263
+ if (Object.keys(contentHashes).length === 0 && fs9.existsSync(learningsPath)) {
5264
+ contentHashes = rebuildContentHashes(stateDir);
5265
+ }
5266
+ } else if (fs9.existsSync(learningsPath)) {
5267
+ contentHashes = rebuildContentHashes(stateDir);
5268
+ } else {
5269
+ contentHashes = {};
5270
+ }
5271
+ if (contentHashes[contentHash]) {
5272
+ return Ok(void 0);
5273
+ }
5176
5274
  const timestamp = (/* @__PURE__ */ new Date()).toISOString().split("T")[0];
5177
- let entry;
5275
+ const fmTags = [];
5276
+ if (skillName) fmTags.push(skillName);
5277
+ if (outcome) fmTags.push(outcome);
5278
+ let bulletLine;
5178
5279
  if (skillName && outcome) {
5179
- entry = `
5180
- - **${timestamp} [skill:${skillName}] [outcome:${outcome}]:** ${learning}
5181
- `;
5280
+ bulletLine = `- **${timestamp} [skill:${skillName}] [outcome:${outcome}]:** ${learning}`;
5182
5281
  } else if (skillName) {
5183
- entry = `
5184
- - **${timestamp} [skill:${skillName}]:** ${learning}
5185
- `;
5282
+ bulletLine = `- **${timestamp} [skill:${skillName}]:** ${learning}`;
5186
5283
  } else {
5187
- entry = `
5188
- - **${timestamp}:** ${learning}
5189
- `;
5284
+ bulletLine = `- **${timestamp}:** ${learning}`;
5190
5285
  }
5286
+ const hash = crypto.createHash("sha256").update(bulletLine).digest("hex").slice(0, 8);
5287
+ const tagsStr = fmTags.length > 0 ? ` tags:${fmTags.join(",")}` : "";
5288
+ const frontmatter = `<!-- hash:${hash}${tagsStr} -->`;
5289
+ const entry = `
5290
+ ${frontmatter}
5291
+ ${bulletLine}
5292
+ `;
5293
+ let existingLineCount;
5191
5294
  if (!fs9.existsSync(learningsPath)) {
5192
5295
  fs9.writeFileSync(learningsPath, `# Learnings
5193
5296
  ${entry}`);
5297
+ existingLineCount = 1;
5194
5298
  } else {
5299
+ const existingContent = fs9.readFileSync(learningsPath, "utf-8");
5300
+ existingLineCount = existingContent.split("\n").length;
5195
5301
  fs9.appendFileSync(learningsPath, entry);
5196
5302
  }
5303
+ const bulletLine_lineNum = existingLineCount + 2;
5304
+ contentHashes[contentHash] = { date: timestamp ?? "", line: bulletLine_lineNum };
5305
+ saveContentHashes(stateDir, contentHashes);
5197
5306
  learningsCacheMap.delete(learningsPath);
5198
5307
  return Ok(void 0);
5199
5308
  } catch (error) {
@@ -5241,7 +5350,30 @@ function analyzeLearningPatterns(entries) {
5241
5350
  return patterns.sort((a, b) => b.count - a.count);
5242
5351
  }
5243
5352
  async function loadBudgetedLearnings(projectPath, options) {
5244
- const { intent, tokenBudget = 1e3, skill, session, stream } = options;
5353
+ const { intent, tokenBudget = 1e3, skill, session, stream, depth = "summary" } = options;
5354
+ if (depth === "index") {
5355
+ const indexEntries = [];
5356
+ if (session) {
5357
+ const sessionResult = await loadIndexEntries(projectPath, skill, stream, session);
5358
+ if (sessionResult.ok) indexEntries.push(...sessionResult.value);
5359
+ }
5360
+ const globalResult2 = await loadIndexEntries(projectPath, skill, stream);
5361
+ if (globalResult2.ok) {
5362
+ const sessionHashes = new Set(indexEntries.map((e) => e.hash));
5363
+ const uniqueGlobal = globalResult2.value.filter((e) => !sessionHashes.has(e.hash));
5364
+ indexEntries.push(...uniqueGlobal);
5365
+ }
5366
+ const budgeted2 = [];
5367
+ let totalTokens2 = 0;
5368
+ for (const entry of indexEntries) {
5369
+ const separator = budgeted2.length > 0 ? "\n" : "";
5370
+ const entryCost = estimateTokens(entry.summary + separator);
5371
+ if (totalTokens2 + entryCost > tokenBudget) break;
5372
+ budgeted2.push(entry.summary);
5373
+ totalTokens2 += entryCost;
5374
+ }
5375
+ return Ok(budgeted2);
5376
+ }
5245
5377
  const sortByRecencyAndRelevance = (entries) => {
5246
5378
  return [...entries].sort((a, b) => {
5247
5379
  const dateA = parseDateFromEntry(a) ?? "0000-00-00";
@@ -5260,7 +5392,9 @@ async function loadBudgetedLearnings(projectPath, options) {
5260
5392
  }
5261
5393
  const globalResult = await loadRelevantLearnings(projectPath, skill, stream);
5262
5394
  if (globalResult.ok) {
5263
- allEntries.push(...sortByRecencyAndRelevance(globalResult.value));
5395
+ const sessionSet = new Set(allEntries.map((e) => e.trim()));
5396
+ const uniqueGlobal = globalResult.value.filter((e) => !sessionSet.has(e.trim()));
5397
+ allEntries.push(...sortByRecencyAndRelevance(uniqueGlobal));
5264
5398
  }
5265
5399
  const budgeted = [];
5266
5400
  let totalTokens = 0;
@@ -5273,6 +5407,68 @@ async function loadBudgetedLearnings(projectPath, options) {
5273
5407
  }
5274
5408
  return Ok(budgeted);
5275
5409
  }
5410
+ async function loadIndexEntries(projectPath, skillName, stream, session) {
5411
+ try {
5412
+ const dirResult = await getStateDir(projectPath, stream, session);
5413
+ if (!dirResult.ok) return dirResult;
5414
+ const stateDir = dirResult.value;
5415
+ const learningsPath = path6.join(stateDir, LEARNINGS_FILE);
5416
+ if (!fs9.existsSync(learningsPath)) {
5417
+ return Ok([]);
5418
+ }
5419
+ const content = fs9.readFileSync(learningsPath, "utf-8");
5420
+ const lines = content.split("\n");
5421
+ const indexEntries = [];
5422
+ let pendingFrontmatter = null;
5423
+ let currentBlock = [];
5424
+ for (const line of lines) {
5425
+ if (line.startsWith("# ")) continue;
5426
+ const fm = parseFrontmatter(line);
5427
+ if (fm) {
5428
+ pendingFrontmatter = fm;
5429
+ continue;
5430
+ }
5431
+ const isDatedBullet = /^- \*\*\d{4}-\d{2}-\d{2}/.test(line);
5432
+ const isHeading = /^## \d{4}-\d{2}-\d{2}/.test(line);
5433
+ if (isDatedBullet || isHeading) {
5434
+ if (pendingFrontmatter) {
5435
+ indexEntries.push({
5436
+ hash: pendingFrontmatter.hash,
5437
+ tags: pendingFrontmatter.tags,
5438
+ summary: line,
5439
+ fullText: ""
5440
+ // Placeholder — full text not loaded in index mode
5441
+ });
5442
+ pendingFrontmatter = null;
5443
+ } else {
5444
+ const idx = extractIndexEntry(line);
5445
+ indexEntries.push({
5446
+ hash: idx.hash,
5447
+ tags: idx.tags,
5448
+ summary: line,
5449
+ fullText: ""
5450
+ });
5451
+ }
5452
+ currentBlock = [line];
5453
+ } else if (line.trim() !== "" && currentBlock.length > 0) {
5454
+ currentBlock.push(line);
5455
+ }
5456
+ }
5457
+ if (skillName) {
5458
+ const filtered = indexEntries.filter(
5459
+ (e) => e.tags.includes(skillName) || e.summary.includes(`[skill:${skillName}]`)
5460
+ );
5461
+ return Ok(filtered);
5462
+ }
5463
+ return Ok(indexEntries);
5464
+ } catch (error) {
5465
+ return Err(
5466
+ new Error(
5467
+ `Failed to load index entries: ${error instanceof Error ? error.message : String(error)}`
5468
+ )
5469
+ );
5470
+ }
5471
+ }
5276
5472
  async function loadRelevantLearnings(projectPath, skillName, stream, session) {
5277
5473
  try {
5278
5474
  const dirResult = await getStateDir(projectPath, stream, session);
@@ -5295,6 +5491,7 @@ async function loadRelevantLearnings(projectPath, skillName, stream, session) {
5295
5491
  let currentBlock = [];
5296
5492
  for (const line of lines) {
5297
5493
  if (line.startsWith("# ")) continue;
5494
+ if (/^<!--\s+hash:[a-f0-9]+/.test(line)) continue;
5298
5495
  const isDatedBullet = /^- \*\*\d{4}-\d{2}-\d{2}/.test(line);
5299
5496
  const isHeading = /^## \d{4}-\d{2}-\d{2}/.test(line);
5300
5497
  if (isDatedBullet || isHeading) {
@@ -5404,6 +5601,68 @@ async function pruneLearnings(projectPath, stream) {
5404
5601
  );
5405
5602
  }
5406
5603
  }
5604
+ var PROMOTABLE_OUTCOMES = ["gotcha", "decision", "observation"];
5605
+ function isGeneralizable(entry) {
5606
+ for (const outcome of PROMOTABLE_OUTCOMES) {
5607
+ if (entry.includes(`[outcome:${outcome}]`)) return true;
5608
+ }
5609
+ return false;
5610
+ }
5611
+ async function promoteSessionLearnings(projectPath, sessionSlug, stream) {
5612
+ try {
5613
+ const sessionResult = await loadRelevantLearnings(projectPath, void 0, stream, sessionSlug);
5614
+ if (!sessionResult.ok) return sessionResult;
5615
+ const sessionEntries = sessionResult.value;
5616
+ if (sessionEntries.length === 0) {
5617
+ return Ok({ promoted: 0, skipped: 0 });
5618
+ }
5619
+ const toPromote = [];
5620
+ let skipped = 0;
5621
+ for (const entry of sessionEntries) {
5622
+ if (isGeneralizable(entry)) {
5623
+ toPromote.push(entry);
5624
+ } else {
5625
+ skipped++;
5626
+ }
5627
+ }
5628
+ if (toPromote.length === 0) {
5629
+ return Ok({ promoted: 0, skipped });
5630
+ }
5631
+ const dirResult = await getStateDir(projectPath, stream);
5632
+ if (!dirResult.ok) return dirResult;
5633
+ const stateDir = dirResult.value;
5634
+ const globalPath = path6.join(stateDir, LEARNINGS_FILE);
5635
+ const existingGlobal = fs9.existsSync(globalPath) ? fs9.readFileSync(globalPath, "utf-8") : "";
5636
+ const newEntries = toPromote.filter((entry) => !existingGlobal.includes(entry.trim()));
5637
+ if (newEntries.length === 0) {
5638
+ return Ok({ promoted: 0, skipped: skipped + toPromote.length });
5639
+ }
5640
+ const promotedContent = newEntries.join("\n\n") + "\n";
5641
+ if (!existingGlobal) {
5642
+ fs9.writeFileSync(globalPath, `# Learnings
5643
+
5644
+ ${promotedContent}`);
5645
+ } else {
5646
+ fs9.appendFileSync(globalPath, "\n\n" + promotedContent);
5647
+ }
5648
+ learningsCacheMap.delete(globalPath);
5649
+ return Ok({
5650
+ promoted: newEntries.length,
5651
+ skipped: skipped + (toPromote.length - newEntries.length)
5652
+ });
5653
+ } catch (error) {
5654
+ return Err(
5655
+ new Error(
5656
+ `Failed to promote session learnings: ${error instanceof Error ? error.message : String(error)}`
5657
+ )
5658
+ );
5659
+ }
5660
+ }
5661
+ async function countLearningEntries(projectPath, stream) {
5662
+ const loadResult = await loadRelevantLearnings(projectPath, void 0, stream);
5663
+ if (!loadResult.ok) return 0;
5664
+ return loadResult.value.length;
5665
+ }
5407
5666
 
5408
5667
  // src/state/failures.ts
5409
5668
  import * as fs10 from "fs";
@@ -5865,6 +6124,151 @@ async function archiveSession(projectPath, sessionSlug) {
5865
6124
  }
5866
6125
  }
5867
6126
 
6127
+ // src/state/events.ts
6128
+ import * as fs16 from "fs";
6129
+ import * as path13 from "path";
6130
+ import { z as z5 } from "zod";
6131
+ var SkillEventSchema = z5.object({
6132
+ timestamp: z5.string(),
6133
+ skill: z5.string(),
6134
+ session: z5.string().optional(),
6135
+ type: z5.enum(["phase_transition", "decision", "gate_result", "handoff", "error", "checkpoint"]),
6136
+ summary: z5.string(),
6137
+ data: z5.record(z5.unknown()).optional(),
6138
+ refs: z5.array(z5.string()).optional(),
6139
+ contentHash: z5.string().optional()
6140
+ });
6141
+ function computeEventHash(event, session) {
6142
+ const identity = `${event.skill}|${event.type}|${event.summary}|${session ?? ""}`;
6143
+ return computeContentHash(identity);
6144
+ }
6145
+ var knownHashesCache = /* @__PURE__ */ new Map();
6146
+ function loadKnownHashes(eventsPath) {
6147
+ const cached = knownHashesCache.get(eventsPath);
6148
+ if (cached) return cached;
6149
+ const hashes = /* @__PURE__ */ new Set();
6150
+ if (fs16.existsSync(eventsPath)) {
6151
+ const content = fs16.readFileSync(eventsPath, "utf-8");
6152
+ const lines = content.split("\n").filter((line) => line.trim() !== "");
6153
+ for (const line of lines) {
6154
+ try {
6155
+ const existing = JSON.parse(line);
6156
+ if (existing.contentHash) {
6157
+ hashes.add(existing.contentHash);
6158
+ }
6159
+ } catch {
6160
+ }
6161
+ }
6162
+ }
6163
+ knownHashesCache.set(eventsPath, hashes);
6164
+ return hashes;
6165
+ }
6166
+ function clearEventHashCache() {
6167
+ knownHashesCache.clear();
6168
+ }
6169
+ async function emitEvent(projectPath, event, options) {
6170
+ try {
6171
+ const dirResult = await getStateDir(projectPath, options?.stream, options?.session);
6172
+ if (!dirResult.ok) return dirResult;
6173
+ const stateDir = dirResult.value;
6174
+ const eventsPath = path13.join(stateDir, EVENTS_FILE);
6175
+ fs16.mkdirSync(stateDir, { recursive: true });
6176
+ const contentHash = computeEventHash(event, options?.session);
6177
+ const knownHashes = loadKnownHashes(eventsPath);
6178
+ if (knownHashes.has(contentHash)) {
6179
+ return Ok({ written: false, reason: "duplicate" });
6180
+ }
6181
+ const fullEvent = {
6182
+ ...event,
6183
+ timestamp: (/* @__PURE__ */ new Date()).toISOString(),
6184
+ contentHash
6185
+ };
6186
+ if (options?.session) {
6187
+ fullEvent.session = options.session;
6188
+ }
6189
+ fs16.appendFileSync(eventsPath, JSON.stringify(fullEvent) + "\n");
6190
+ knownHashes.add(contentHash);
6191
+ return Ok({ written: true });
6192
+ } catch (error) {
6193
+ return Err(
6194
+ new Error(`Failed to emit event: ${error instanceof Error ? error.message : String(error)}`)
6195
+ );
6196
+ }
6197
+ }
6198
+ async function loadEvents(projectPath, options) {
6199
+ try {
6200
+ const dirResult = await getStateDir(projectPath, options?.stream, options?.session);
6201
+ if (!dirResult.ok) return dirResult;
6202
+ const stateDir = dirResult.value;
6203
+ const eventsPath = path13.join(stateDir, EVENTS_FILE);
6204
+ if (!fs16.existsSync(eventsPath)) {
6205
+ return Ok([]);
6206
+ }
6207
+ const content = fs16.readFileSync(eventsPath, "utf-8");
6208
+ const lines = content.split("\n").filter((line) => line.trim() !== "");
6209
+ const events = [];
6210
+ for (const line of lines) {
6211
+ try {
6212
+ const parsed = JSON.parse(line);
6213
+ const result = SkillEventSchema.safeParse(parsed);
6214
+ if (result.success) {
6215
+ events.push(result.data);
6216
+ }
6217
+ } catch {
6218
+ }
6219
+ }
6220
+ return Ok(events);
6221
+ } catch (error) {
6222
+ return Err(
6223
+ new Error(`Failed to load events: ${error instanceof Error ? error.message : String(error)}`)
6224
+ );
6225
+ }
6226
+ }
6227
+ function formatPhaseTransition(event) {
6228
+ const data = event.data;
6229
+ const suffix = data?.taskCount ? ` (${data.taskCount} tasks)` : "";
6230
+ return `phase: ${data?.from ?? "?"} -> ${data?.to ?? "?"}${suffix}`;
6231
+ }
6232
+ function formatGateResult(event) {
6233
+ const data = event.data;
6234
+ const status = data?.passed ? "passed" : "failed";
6235
+ const checks = data?.checks?.map((c) => `${c.name} ${c.passed ? "Y" : "N"}`).join(", ");
6236
+ return checks ? `gate: ${status} (${checks})` : `gate: ${status}`;
6237
+ }
6238
+ function formatHandoffDetail(event) {
6239
+ const data = event.data;
6240
+ const direction = data?.toSkill ? ` -> ${data.toSkill}` : "";
6241
+ return `handoff: ${event.summary}${direction}`;
6242
+ }
6243
+ var EVENT_FORMATTERS = {
6244
+ phase_transition: formatPhaseTransition,
6245
+ gate_result: formatGateResult,
6246
+ decision: (event) => `decision: ${event.summary}`,
6247
+ handoff: formatHandoffDetail,
6248
+ error: (event) => `error: ${event.summary}`,
6249
+ checkpoint: (event) => `checkpoint: ${event.summary}`
6250
+ };
6251
+ function formatEventTimeline(events, limit = 20) {
6252
+ if (events.length === 0) return "";
6253
+ const recent = events.slice(-limit);
6254
+ return recent.map((event) => {
6255
+ const time = formatTime(event.timestamp);
6256
+ const formatter = EVENT_FORMATTERS[event.type];
6257
+ const detail = formatter ? formatter(event) : event.summary;
6258
+ return `- ${time} [${event.skill}] ${detail}`;
6259
+ }).join("\n");
6260
+ }
6261
+ function formatTime(timestamp) {
6262
+ try {
6263
+ const date = new Date(timestamp);
6264
+ const hours = String(date.getHours()).padStart(2, "0");
6265
+ const minutes = String(date.getMinutes()).padStart(2, "0");
6266
+ return `${hours}:${minutes}`;
6267
+ } catch {
6268
+ return "??:??";
6269
+ }
6270
+ }
6271
+
5868
6272
  // src/workflow/runner.ts
5869
6273
  async function executeWorkflow(workflow, executor) {
5870
6274
  const stepResults = [];
@@ -6014,7 +6418,8 @@ async function runMultiTurnPipeline(initialContext, turnExecutor, options) {
6014
6418
  }
6015
6419
 
6016
6420
  // src/security/scanner.ts
6017
- import * as fs17 from "fs/promises";
6421
+ import * as fs18 from "fs/promises";
6422
+ import { minimatch as minimatch4 } from "minimatch";
6018
6423
 
6019
6424
  // src/security/rules/registry.ts
6020
6425
  var RuleRegistry = class {
@@ -6045,7 +6450,7 @@ var RuleRegistry = class {
6045
6450
  };
6046
6451
 
6047
6452
  // src/security/config.ts
6048
- import { z as z5 } from "zod";
6453
+ import { z as z6 } from "zod";
6049
6454
 
6050
6455
  // src/security/types.ts
6051
6456
  var DEFAULT_SECURITY_CONFIG = {
@@ -6056,19 +6461,19 @@ var DEFAULT_SECURITY_CONFIG = {
6056
6461
  };
6057
6462
 
6058
6463
  // src/security/config.ts
6059
- var RuleOverrideSchema = z5.enum(["off", "error", "warning", "info"]);
6060
- var SecurityConfigSchema = z5.object({
6061
- enabled: z5.boolean().default(true),
6062
- strict: z5.boolean().default(false),
6063
- rules: z5.record(z5.string(), RuleOverrideSchema).optional().default({}),
6064
- exclude: z5.array(z5.string()).optional().default(["**/node_modules/**", "**/dist/**", "**/*.test.ts", "**/fixtures/**"]),
6065
- external: z5.object({
6066
- semgrep: z5.object({
6067
- enabled: z5.union([z5.literal("auto"), z5.boolean()]).default("auto"),
6068
- rulesets: z5.array(z5.string()).optional()
6464
+ var RuleOverrideSchema = z6.enum(["off", "error", "warning", "info"]);
6465
+ var SecurityConfigSchema = z6.object({
6466
+ enabled: z6.boolean().default(true),
6467
+ strict: z6.boolean().default(false),
6468
+ rules: z6.record(z6.string(), RuleOverrideSchema).optional().default({}),
6469
+ exclude: z6.array(z6.string()).optional().default(["**/node_modules/**", "**/dist/**", "**/*.test.ts", "**/fixtures/**"]),
6470
+ external: z6.object({
6471
+ semgrep: z6.object({
6472
+ enabled: z6.union([z6.literal("auto"), z6.boolean()]).default("auto"),
6473
+ rulesets: z6.array(z6.string()).optional()
6069
6474
  }).optional(),
6070
- gitleaks: z5.object({
6071
- enabled: z5.union([z5.literal("auto"), z5.boolean()]).default("auto")
6475
+ gitleaks: z6.object({
6476
+ enabled: z6.union([z6.literal("auto"), z6.boolean()]).default("auto")
6072
6477
  }).optional()
6073
6478
  }).optional()
6074
6479
  });
@@ -6101,15 +6506,15 @@ function resolveRuleSeverity(ruleId, defaultSeverity, overrides, strict) {
6101
6506
  }
6102
6507
 
6103
6508
  // src/security/stack-detector.ts
6104
- import * as fs16 from "fs";
6105
- import * as path13 from "path";
6509
+ import * as fs17 from "fs";
6510
+ import * as path14 from "path";
6106
6511
  function detectStack(projectRoot) {
6107
6512
  const stacks = [];
6108
- const pkgJsonPath = path13.join(projectRoot, "package.json");
6109
- if (fs16.existsSync(pkgJsonPath)) {
6513
+ const pkgJsonPath = path14.join(projectRoot, "package.json");
6514
+ if (fs17.existsSync(pkgJsonPath)) {
6110
6515
  stacks.push("node");
6111
6516
  try {
6112
- const pkgJson = JSON.parse(fs16.readFileSync(pkgJsonPath, "utf-8"));
6517
+ const pkgJson = JSON.parse(fs17.readFileSync(pkgJsonPath, "utf-8"));
6113
6518
  const allDeps = {
6114
6519
  ...pkgJson.dependencies,
6115
6520
  ...pkgJson.devDependencies
@@ -6124,13 +6529,13 @@ function detectStack(projectRoot) {
6124
6529
  } catch {
6125
6530
  }
6126
6531
  }
6127
- const goModPath = path13.join(projectRoot, "go.mod");
6128
- if (fs16.existsSync(goModPath)) {
6532
+ const goModPath = path14.join(projectRoot, "go.mod");
6533
+ if (fs17.existsSync(goModPath)) {
6129
6534
  stacks.push("go");
6130
6535
  }
6131
- const requirementsPath = path13.join(projectRoot, "requirements.txt");
6132
- const pyprojectPath = path13.join(projectRoot, "pyproject.toml");
6133
- if (fs16.existsSync(requirementsPath) || fs16.existsSync(pyprojectPath)) {
6536
+ const requirementsPath = path14.join(projectRoot, "requirements.txt");
6537
+ const pyprojectPath = path14.join(projectRoot, "pyproject.toml");
6538
+ if (fs17.existsSync(requirementsPath) || fs17.existsSync(pyprojectPath)) {
6134
6539
  stacks.push("python");
6135
6540
  }
6136
6541
  return stacks;
@@ -6194,6 +6599,72 @@ var secretRules = [
6194
6599
  message: "Hardcoded JWT token detected",
6195
6600
  remediation: "Tokens should be fetched at runtime, not embedded in source",
6196
6601
  references: ["CWE-798"]
6602
+ },
6603
+ {
6604
+ id: "SEC-SEC-006",
6605
+ name: "Anthropic API Key",
6606
+ category: "secrets",
6607
+ severity: "error",
6608
+ confidence: "high",
6609
+ patterns: [/sk-ant-api\d{2}-[A-Za-z0-9_-]{20,}/],
6610
+ message: "Hardcoded Anthropic API key detected",
6611
+ remediation: "Use environment variables: process.env.ANTHROPIC_API_KEY",
6612
+ references: ["CWE-798"]
6613
+ },
6614
+ {
6615
+ id: "SEC-SEC-007",
6616
+ name: "OpenAI API Key",
6617
+ category: "secrets",
6618
+ severity: "error",
6619
+ confidence: "high",
6620
+ patterns: [/sk-proj-[A-Za-z0-9_-]{20,}/],
6621
+ message: "Hardcoded OpenAI API key detected",
6622
+ remediation: "Use environment variables: process.env.OPENAI_API_KEY",
6623
+ references: ["CWE-798"]
6624
+ },
6625
+ {
6626
+ id: "SEC-SEC-008",
6627
+ name: "Google API Key",
6628
+ category: "secrets",
6629
+ severity: "error",
6630
+ confidence: "high",
6631
+ patterns: [/AIza[A-Za-z0-9_-]{35}/],
6632
+ message: "Hardcoded Google API key detected",
6633
+ remediation: "Use environment variables or a secrets manager for Google API keys",
6634
+ references: ["CWE-798"]
6635
+ },
6636
+ {
6637
+ id: "SEC-SEC-009",
6638
+ name: "GitHub Personal Access Token",
6639
+ category: "secrets",
6640
+ severity: "error",
6641
+ confidence: "high",
6642
+ patterns: [/gh[pous]_[A-Za-z0-9_]{36,}/],
6643
+ message: "Hardcoded GitHub personal access token detected",
6644
+ remediation: "Use environment variables: process.env.GITHUB_TOKEN",
6645
+ references: ["CWE-798"]
6646
+ },
6647
+ {
6648
+ id: "SEC-SEC-010",
6649
+ name: "Stripe Live Key",
6650
+ category: "secrets",
6651
+ severity: "error",
6652
+ confidence: "high",
6653
+ patterns: [/\b[spr]k_live_[A-Za-z0-9]{24,}/],
6654
+ message: "Hardcoded Stripe live key detected",
6655
+ remediation: "Use environment variables for Stripe keys; never commit live keys",
6656
+ references: ["CWE-798"]
6657
+ },
6658
+ {
6659
+ id: "SEC-SEC-011",
6660
+ name: "Database Connection String with Credentials",
6661
+ category: "secrets",
6662
+ severity: "error",
6663
+ confidence: "high",
6664
+ patterns: [/(?:postgres|mysql|mongodb|redis|amqp|mssql)(?:\+\w+)?:\/\/[^/\s:]+:[^@/\s]+@/i],
6665
+ message: "Database connection string with embedded credentials detected",
6666
+ remediation: "Use environment variables for connection strings; separate credentials from URIs",
6667
+ references: ["CWE-798"]
6197
6668
  }
6198
6669
  ];
6199
6670
 
@@ -6380,6 +6851,158 @@ var deserializationRules = [
6380
6851
  }
6381
6852
  ];
6382
6853
 
6854
+ // src/security/rules/agent-config.ts
6855
+ var agentConfigRules = [
6856
+ {
6857
+ id: "SEC-AGT-001",
6858
+ name: "Hidden Unicode Characters",
6859
+ category: "agent-config",
6860
+ severity: "error",
6861
+ confidence: "high",
6862
+ patterns: [/\u200B|\u200C|\u200D|\uFEFF|\u2060/],
6863
+ fileGlob: "**/CLAUDE.md,**/AGENTS.md,**/*.yaml",
6864
+ message: "Hidden zero-width Unicode characters detected in agent configuration",
6865
+ remediation: "Remove invisible Unicode characters; they may hide malicious instructions",
6866
+ references: ["CWE-116"]
6867
+ },
6868
+ {
6869
+ id: "SEC-AGT-002",
6870
+ name: "URL Execution Directives",
6871
+ category: "agent-config",
6872
+ severity: "warning",
6873
+ confidence: "medium",
6874
+ patterns: [/\b(?:curl|wget)\s+\S+/i, /\bfetch\s*\(/i],
6875
+ fileGlob: "**/CLAUDE.md,**/AGENTS.md",
6876
+ message: "URL execution directive found in agent configuration",
6877
+ remediation: "Avoid instructing agents to download and execute remote content",
6878
+ references: ["CWE-94"]
6879
+ },
6880
+ {
6881
+ id: "SEC-AGT-003",
6882
+ name: "Wildcard Tool Permissions",
6883
+ category: "agent-config",
6884
+ severity: "warning",
6885
+ confidence: "high",
6886
+ patterns: [/(?:Bash|Write|Edit)\s*\(\s*\*\s*\)/],
6887
+ fileGlob: "**/.claude/**,**/settings*.json",
6888
+ message: "Wildcard tool permissions grant unrestricted access",
6889
+ remediation: "Scope tool permissions to specific patterns instead of wildcards",
6890
+ references: ["CWE-250"]
6891
+ },
6892
+ {
6893
+ id: "SEC-AGT-004",
6894
+ name: "Auto-approve Patterns",
6895
+ category: "agent-config",
6896
+ severity: "warning",
6897
+ confidence: "high",
6898
+ patterns: [/\bautoApprove\b/i, /\bauto_approve\b/i],
6899
+ fileGlob: "**/.claude/**,**/.mcp.json",
6900
+ message: "Auto-approve configuration bypasses human review of tool calls",
6901
+ remediation: "Review auto-approved tools carefully; prefer explicit approval for destructive operations",
6902
+ references: ["CWE-862"]
6903
+ },
6904
+ {
6905
+ id: "SEC-AGT-005",
6906
+ name: "Prompt Injection Surface",
6907
+ category: "agent-config",
6908
+ severity: "warning",
6909
+ confidence: "medium",
6910
+ patterns: [/\$\{[^}]*\}/, /\{\{[^}]*\}\}/],
6911
+ fileGlob: "**/skill.yaml",
6912
+ message: "Template interpolation syntax in skill YAML may enable prompt injection",
6913
+ remediation: "Avoid dynamic interpolation in skill descriptions; use static text",
6914
+ references: ["CWE-94"]
6915
+ },
6916
+ {
6917
+ id: "SEC-AGT-006",
6918
+ name: "Permission Bypass Flags",
6919
+ category: "agent-config",
6920
+ severity: "error",
6921
+ confidence: "high",
6922
+ patterns: [/--dangerously-skip-permissions/, /--no-verify/],
6923
+ fileGlob: "**/CLAUDE.md,**/AGENTS.md,**/.claude/**",
6924
+ message: "Permission bypass flag detected in agent configuration",
6925
+ remediation: "Remove flags that bypass safety checks; they undermine enforcement",
6926
+ references: ["CWE-863"]
6927
+ },
6928
+ {
6929
+ id: "SEC-AGT-007",
6930
+ name: "Hook Injection Surface",
6931
+ category: "agent-config",
6932
+ severity: "error",
6933
+ confidence: "low",
6934
+ patterns: [/\$\(/, /`[^`]+`/, /\s&&\s/, /\s\|\|\s/],
6935
+ fileGlob: "**/settings*.json,**/hooks.json",
6936
+ message: "Shell metacharacters in hook commands may enable command injection",
6937
+ remediation: "Use simple, single-command hooks without shell operators; chain logic inside the script",
6938
+ references: ["CWE-78"]
6939
+ }
6940
+ ];
6941
+
6942
+ // src/security/rules/mcp.ts
6943
+ var mcpRules = [
6944
+ {
6945
+ id: "SEC-MCP-001",
6946
+ name: "Hardcoded MCP Secrets",
6947
+ category: "mcp",
6948
+ severity: "error",
6949
+ confidence: "medium",
6950
+ patterns: [/(?:API_KEY|SECRET|TOKEN|PASSWORD|CREDENTIAL)\s*["']?\s*:\s*["'][^"']{8,}["']/i],
6951
+ fileGlob: "**/.mcp.json",
6952
+ message: "Hardcoded secret detected in MCP server configuration",
6953
+ remediation: "Use environment variable references instead of inline secrets in .mcp.json",
6954
+ references: ["CWE-798"]
6955
+ },
6956
+ {
6957
+ id: "SEC-MCP-002",
6958
+ name: "Shell Injection in MCP Args",
6959
+ category: "mcp",
6960
+ severity: "error",
6961
+ confidence: "medium",
6962
+ patterns: [/\$\(/, /`[^`]+`/],
6963
+ fileGlob: "**/.mcp.json",
6964
+ message: "Shell metacharacters detected in MCP server arguments",
6965
+ remediation: "Use literal argument values; avoid shell interpolation in MCP args",
6966
+ references: ["CWE-78"]
6967
+ },
6968
+ {
6969
+ id: "SEC-MCP-003",
6970
+ name: "Network Exposure",
6971
+ category: "mcp",
6972
+ severity: "warning",
6973
+ confidence: "high",
6974
+ patterns: [/0\.0\.0\.0/, /["']\*["']\s*:\s*\d/, /host["']?\s*:\s*["']\*["']/i],
6975
+ fileGlob: "**/.mcp.json",
6976
+ message: "MCP server binding to all network interfaces (0.0.0.0 or wildcard *)",
6977
+ remediation: "Bind to 127.0.0.1 or localhost to restrict access to local machine",
6978
+ references: ["CWE-668"]
6979
+ },
6980
+ {
6981
+ id: "SEC-MCP-004",
6982
+ name: "Typosquatting Vector",
6983
+ category: "mcp",
6984
+ severity: "warning",
6985
+ confidence: "medium",
6986
+ patterns: [/\bnpx\s+(?:-y|--yes)\b/],
6987
+ fileGlob: "**/.mcp.json",
6988
+ message: "npx -y auto-installs packages without confirmation, enabling typosquatting",
6989
+ remediation: "Pin exact package versions or install packages explicitly before use",
6990
+ references: ["CWE-427"]
6991
+ },
6992
+ {
6993
+ id: "SEC-MCP-005",
6994
+ name: "Unencrypted Transport",
6995
+ category: "mcp",
6996
+ severity: "warning",
6997
+ confidence: "medium",
6998
+ patterns: [/http:\/\/(?!localhost\b|127\.0\.0\.1\b)/],
6999
+ fileGlob: "**/.mcp.json",
7000
+ message: "Unencrypted HTTP transport detected for MCP server connection",
7001
+ remediation: "Use https:// for all non-localhost MCP server connections",
7002
+ references: ["CWE-319"]
7003
+ }
7004
+ ];
7005
+
6383
7006
  // src/security/rules/stack/node.ts
6384
7007
  var nodeRules = [
6385
7008
  {
@@ -6507,7 +7130,9 @@ var SecurityScanner = class {
6507
7130
  ...cryptoRules,
6508
7131
  ...pathTraversalRules,
6509
7132
  ...networkRules,
6510
- ...deserializationRules
7133
+ ...deserializationRules,
7134
+ ...agentConfigRules,
7135
+ ...mcpRules
6511
7136
  ]);
6512
7137
  this.registry.registerAll([...nodeRules, ...expressRules, ...reactRules, ...goRules]);
6513
7138
  this.activeRules = this.registry.getAll();
@@ -6516,6 +7141,12 @@ var SecurityScanner = class {
6516
7141
  const stacks = detectStack(projectRoot);
6517
7142
  this.activeRules = this.registry.getForStacks(stacks.length > 0 ? stacks : []);
6518
7143
  }
7144
+ /**
7145
+ * Scan raw content against all active rules. Note: this method does NOT apply
7146
+ * fileGlob filtering — every active rule is evaluated regardless of filePath.
7147
+ * If you are scanning a specific file and want fileGlob-based rule filtering,
7148
+ * use {@link scanFile} instead.
7149
+ */
6519
7150
  scanContent(content, filePath, startLine = 1) {
6520
7151
  if (!this.config.enabled) return [];
6521
7152
  const findings = [];
@@ -6557,8 +7188,52 @@ var SecurityScanner = class {
6557
7188
  }
6558
7189
  async scanFile(filePath) {
6559
7190
  if (!this.config.enabled) return [];
6560
- const content = await fs17.readFile(filePath, "utf-8");
6561
- return this.scanContent(content, filePath, 1);
7191
+ const content = await fs18.readFile(filePath, "utf-8");
7192
+ return this.scanContentForFile(content, filePath, 1);
7193
+ }
7194
+ scanContentForFile(content, filePath, startLine = 1) {
7195
+ if (!this.config.enabled) return [];
7196
+ const findings = [];
7197
+ const lines = content.split("\n");
7198
+ const applicableRules = this.activeRules.filter((rule) => {
7199
+ if (!rule.fileGlob) return true;
7200
+ const globs = rule.fileGlob.split(",").map((g) => g.trim());
7201
+ return globs.some((glob) => minimatch4(filePath, glob, { dot: true }));
7202
+ });
7203
+ for (const rule of applicableRules) {
7204
+ const resolved = resolveRuleSeverity(
7205
+ rule.id,
7206
+ rule.severity,
7207
+ this.config.rules ?? {},
7208
+ this.config.strict
7209
+ );
7210
+ if (resolved === "off") continue;
7211
+ for (let i = 0; i < lines.length; i++) {
7212
+ const line = lines[i] ?? "";
7213
+ if (line.includes("harness-ignore") && line.includes(rule.id)) continue;
7214
+ for (const pattern of rule.patterns) {
7215
+ pattern.lastIndex = 0;
7216
+ if (pattern.test(line)) {
7217
+ findings.push({
7218
+ ruleId: rule.id,
7219
+ ruleName: rule.name,
7220
+ category: rule.category,
7221
+ severity: resolved,
7222
+ confidence: rule.confidence,
7223
+ file: filePath,
7224
+ line: startLine + i,
7225
+ match: line.trim(),
7226
+ context: line,
7227
+ message: rule.message,
7228
+ remediation: rule.remediation,
7229
+ ...rule.references ? { references: rule.references } : {}
7230
+ });
7231
+ break;
7232
+ }
7233
+ }
7234
+ }
7235
+ }
7236
+ return findings;
6562
7237
  }
6563
7238
  async scanFiles(filePaths) {
6564
7239
  const allFindings = [];
@@ -6582,7 +7257,7 @@ var SecurityScanner = class {
6582
7257
  };
6583
7258
 
6584
7259
  // src/ci/check-orchestrator.ts
6585
- import * as path14 from "path";
7260
+ import * as path15 from "path";
6586
7261
  var ALL_CHECKS = [
6587
7262
  "validate",
6588
7263
  "deps",
@@ -6595,7 +7270,7 @@ var ALL_CHECKS = [
6595
7270
  ];
6596
7271
  async function runValidateCheck(projectRoot, config) {
6597
7272
  const issues = [];
6598
- const agentsPath = path14.join(projectRoot, config.agentsMapPath ?? "AGENTS.md");
7273
+ const agentsPath = path15.join(projectRoot, config.agentsMapPath ?? "AGENTS.md");
6599
7274
  const result = await validateAgentsMap(agentsPath);
6600
7275
  if (!result.ok) {
6601
7276
  issues.push({ severity: "error", message: result.error.message });
@@ -6652,7 +7327,7 @@ async function runDepsCheck(projectRoot, config) {
6652
7327
  }
6653
7328
  async function runDocsCheck(projectRoot, config) {
6654
7329
  const issues = [];
6655
- const docsDir = path14.join(projectRoot, config.docsDir ?? "docs");
7330
+ const docsDir = path15.join(projectRoot, config.docsDir ?? "docs");
6656
7331
  const entropyConfig = config.entropy || {};
6657
7332
  const result = await checkDocCoverage("project", {
6658
7333
  docsDir,
@@ -6930,7 +7605,7 @@ async function runCIChecks(input) {
6930
7605
  }
6931
7606
 
6932
7607
  // src/review/mechanical-checks.ts
6933
- import * as path15 from "path";
7608
+ import * as path16 from "path";
6934
7609
  async function runMechanicalChecks(options) {
6935
7610
  const { projectRoot, config, skip = [], changedFiles } = options;
6936
7611
  const findings = [];
@@ -6942,7 +7617,7 @@ async function runMechanicalChecks(options) {
6942
7617
  };
6943
7618
  if (!skip.includes("validate")) {
6944
7619
  try {
6945
- const agentsPath = path15.join(projectRoot, config.agentsMapPath ?? "AGENTS.md");
7620
+ const agentsPath = path16.join(projectRoot, config.agentsMapPath ?? "AGENTS.md");
6946
7621
  const result = await validateAgentsMap(agentsPath);
6947
7622
  if (!result.ok) {
6948
7623
  statuses.validate = "fail";
@@ -6979,7 +7654,7 @@ async function runMechanicalChecks(options) {
6979
7654
  statuses.validate = "fail";
6980
7655
  findings.push({
6981
7656
  tool: "validate",
6982
- file: path15.join(projectRoot, "AGENTS.md"),
7657
+ file: path16.join(projectRoot, "AGENTS.md"),
6983
7658
  message: err instanceof Error ? err.message : String(err),
6984
7659
  severity: "error"
6985
7660
  });
@@ -7043,7 +7718,7 @@ async function runMechanicalChecks(options) {
7043
7718
  (async () => {
7044
7719
  const localFindings = [];
7045
7720
  try {
7046
- const docsDir = path15.join(projectRoot, config.docsDir ?? "docs");
7721
+ const docsDir = path16.join(projectRoot, config.docsDir ?? "docs");
7047
7722
  const result = await checkDocCoverage("project", { docsDir });
7048
7723
  if (!result.ok) {
7049
7724
  statuses["check-docs"] = "warn";
@@ -7070,7 +7745,7 @@ async function runMechanicalChecks(options) {
7070
7745
  statuses["check-docs"] = "warn";
7071
7746
  localFindings.push({
7072
7747
  tool: "check-docs",
7073
- file: path15.join(projectRoot, "docs"),
7748
+ file: path16.join(projectRoot, "docs"),
7074
7749
  message: err instanceof Error ? err.message : String(err),
7075
7750
  severity: "warning"
7076
7751
  });
@@ -7218,7 +7893,7 @@ function detectChangeType(commitMessage, diff2) {
7218
7893
  }
7219
7894
 
7220
7895
  // src/review/context-scoper.ts
7221
- import * as path16 from "path";
7896
+ import * as path17 from "path";
7222
7897
  var ALL_DOMAINS = ["compliance", "bug", "security", "architecture"];
7223
7898
  var SECURITY_PATTERNS = /auth|crypto|password|secret|token|session|cookie|hash|encrypt|decrypt|sql|shell|exec|eval/i;
7224
7899
  function computeContextBudget(diffLines) {
@@ -7226,18 +7901,18 @@ function computeContextBudget(diffLines) {
7226
7901
  return diffLines;
7227
7902
  }
7228
7903
  function isWithinProject(absPath, projectRoot) {
7229
- const resolvedRoot = path16.resolve(projectRoot) + path16.sep;
7230
- const resolvedPath = path16.resolve(absPath);
7231
- return resolvedPath.startsWith(resolvedRoot) || resolvedPath === path16.resolve(projectRoot);
7904
+ const resolvedRoot = path17.resolve(projectRoot) + path17.sep;
7905
+ const resolvedPath = path17.resolve(absPath);
7906
+ return resolvedPath.startsWith(resolvedRoot) || resolvedPath === path17.resolve(projectRoot);
7232
7907
  }
7233
7908
  async function readContextFile(projectRoot, filePath, reason) {
7234
- const absPath = path16.isAbsolute(filePath) ? filePath : path16.join(projectRoot, filePath);
7909
+ const absPath = path17.isAbsolute(filePath) ? filePath : path17.join(projectRoot, filePath);
7235
7910
  if (!isWithinProject(absPath, projectRoot)) return null;
7236
7911
  const result = await readFileContent(absPath);
7237
7912
  if (!result.ok) return null;
7238
7913
  const content = result.value;
7239
7914
  const lines = content.split("\n").length;
7240
- const relPath = path16.isAbsolute(filePath) ? relativePosix(projectRoot, filePath) : filePath;
7915
+ const relPath = path17.isAbsolute(filePath) ? relativePosix(projectRoot, filePath) : filePath;
7241
7916
  return { path: relPath, content, reason, lines };
7242
7917
  }
7243
7918
  function extractImportSources(content) {
@@ -7252,18 +7927,18 @@ function extractImportSources(content) {
7252
7927
  }
7253
7928
  async function resolveImportPath(projectRoot, fromFile, importSource) {
7254
7929
  if (!importSource.startsWith(".")) return null;
7255
- const fromDir = path16.dirname(path16.join(projectRoot, fromFile));
7256
- const basePath = path16.resolve(fromDir, importSource);
7930
+ const fromDir = path17.dirname(path17.join(projectRoot, fromFile));
7931
+ const basePath = path17.resolve(fromDir, importSource);
7257
7932
  if (!isWithinProject(basePath, projectRoot)) return null;
7258
7933
  const relBase = relativePosix(projectRoot, basePath);
7259
7934
  const candidates = [
7260
7935
  relBase + ".ts",
7261
7936
  relBase + ".tsx",
7262
7937
  relBase + ".mts",
7263
- path16.join(relBase, "index.ts")
7938
+ path17.join(relBase, "index.ts")
7264
7939
  ];
7265
7940
  for (const candidate of candidates) {
7266
- const absCandidate = path16.join(projectRoot, candidate);
7941
+ const absCandidate = path17.join(projectRoot, candidate);
7267
7942
  if (await fileExists(absCandidate)) {
7268
7943
  return candidate;
7269
7944
  }
@@ -7271,7 +7946,7 @@ async function resolveImportPath(projectRoot, fromFile, importSource) {
7271
7946
  return null;
7272
7947
  }
7273
7948
  async function findTestFiles(projectRoot, sourceFile) {
7274
- const baseName = path16.basename(sourceFile, path16.extname(sourceFile));
7949
+ const baseName = path17.basename(sourceFile, path17.extname(sourceFile));
7275
7950
  const pattern = `**/${baseName}.{test,spec}.{ts,tsx,mts}`;
7276
7951
  const results = await findFiles(pattern, projectRoot);
7277
7952
  return results.map((f) => relativePosix(projectRoot, f));
@@ -8080,7 +8755,7 @@ async function fanOutReview(options) {
8080
8755
  }
8081
8756
 
8082
8757
  // src/review/validate-findings.ts
8083
- import * as path17 from "path";
8758
+ import * as path18 from "path";
8084
8759
  var DOWNGRADE_MAP = {
8085
8760
  critical: "important",
8086
8761
  important: "suggestion",
@@ -8101,7 +8776,7 @@ function normalizePath(filePath, projectRoot) {
8101
8776
  let normalized = filePath;
8102
8777
  normalized = normalized.replace(/\\/g, "/");
8103
8778
  const normalizedRoot = projectRoot.replace(/\\/g, "/");
8104
- if (path17.isAbsolute(normalized)) {
8779
+ if (path18.isAbsolute(normalized)) {
8105
8780
  const root = normalizedRoot.endsWith("/") ? normalizedRoot : normalizedRoot + "/";
8106
8781
  if (normalized.startsWith(root)) {
8107
8782
  normalized = normalized.slice(root.length);
@@ -8126,12 +8801,12 @@ function followImportChain(fromFile, fileContents, maxDepth = 2) {
8126
8801
  while ((match = importRegex.exec(content)) !== null) {
8127
8802
  const importPath = match[1];
8128
8803
  if (!importPath.startsWith(".")) continue;
8129
- const dir = path17.dirname(current.file);
8130
- let resolved = path17.join(dir, importPath).replace(/\\/g, "/");
8804
+ const dir = path18.dirname(current.file);
8805
+ let resolved = path18.join(dir, importPath).replace(/\\/g, "/");
8131
8806
  if (!resolved.match(/\.(ts|tsx|js|jsx)$/)) {
8132
8807
  resolved += ".ts";
8133
8808
  }
8134
- resolved = path17.normalize(resolved).replace(/\\/g, "/");
8809
+ resolved = path18.normalize(resolved).replace(/\\/g, "/");
8135
8810
  if (!visited.has(resolved) && current.depth + 1 <= maxDepth) {
8136
8811
  queue.push({ file: resolved, depth: current.depth + 1 });
8137
8812
  }
@@ -8148,7 +8823,7 @@ async function validateFindings(options) {
8148
8823
  if (exclusionSet.isExcluded(normalizedFile, finding.lineRange) || exclusionSet.isExcluded(finding.file, finding.lineRange)) {
8149
8824
  continue;
8150
8825
  }
8151
- const absoluteFile = path17.isAbsolute(finding.file) ? finding.file : path17.join(projectRoot, finding.file).replace(/\\/g, "/");
8826
+ const absoluteFile = path18.isAbsolute(finding.file) ? finding.file : path18.join(projectRoot, finding.file).replace(/\\/g, "/");
8152
8827
  if (exclusionSet.isExcluded(absoluteFile, finding.lineRange)) {
8153
8828
  continue;
8154
8829
  }
@@ -8776,7 +9451,7 @@ function parseRoadmap(markdown) {
8776
9451
  if (!fmMatch) {
8777
9452
  return Err2(new Error("Missing or malformed YAML frontmatter"));
8778
9453
  }
8779
- const fmResult = parseFrontmatter(fmMatch[1]);
9454
+ const fmResult = parseFrontmatter2(fmMatch[1]);
8780
9455
  if (!fmResult.ok) return fmResult;
8781
9456
  const body = markdown.slice(fmMatch[0].length);
8782
9457
  const milestonesResult = parseMilestones(body);
@@ -8786,7 +9461,7 @@ function parseRoadmap(markdown) {
8786
9461
  milestones: milestonesResult.value
8787
9462
  });
8788
9463
  }
8789
- function parseFrontmatter(raw) {
9464
+ function parseFrontmatter2(raw) {
8790
9465
  const lines = raw.split("\n");
8791
9466
  const map = /* @__PURE__ */ new Map();
8792
9467
  for (const line of lines) {
@@ -8952,8 +9627,8 @@ function serializeFeature(feature) {
8952
9627
  }
8953
9628
 
8954
9629
  // src/roadmap/sync.ts
8955
- import * as fs18 from "fs";
8956
- import * as path18 from "path";
9630
+ import * as fs19 from "fs";
9631
+ import * as path19 from "path";
8957
9632
  import { Ok as Ok3 } from "@harness-engineering/types";
8958
9633
  function inferStatus(feature, projectPath, allFeatures) {
8959
9634
  if (feature.blockedBy.length > 0) {
@@ -8968,10 +9643,10 @@ function inferStatus(feature, projectPath, allFeatures) {
8968
9643
  const featuresWithPlans = allFeatures.filter((f) => f.plans.length > 0);
8969
9644
  const useRootState = featuresWithPlans.length <= 1;
8970
9645
  if (useRootState) {
8971
- const rootStatePath = path18.join(projectPath, ".harness", "state.json");
8972
- if (fs18.existsSync(rootStatePath)) {
9646
+ const rootStatePath = path19.join(projectPath, ".harness", "state.json");
9647
+ if (fs19.existsSync(rootStatePath)) {
8973
9648
  try {
8974
- const raw = fs18.readFileSync(rootStatePath, "utf-8");
9649
+ const raw = fs19.readFileSync(rootStatePath, "utf-8");
8975
9650
  const state = JSON.parse(raw);
8976
9651
  if (state.progress) {
8977
9652
  for (const status of Object.values(state.progress)) {
@@ -8982,16 +9657,16 @@ function inferStatus(feature, projectPath, allFeatures) {
8982
9657
  }
8983
9658
  }
8984
9659
  }
8985
- const sessionsDir = path18.join(projectPath, ".harness", "sessions");
8986
- if (fs18.existsSync(sessionsDir)) {
9660
+ const sessionsDir = path19.join(projectPath, ".harness", "sessions");
9661
+ if (fs19.existsSync(sessionsDir)) {
8987
9662
  try {
8988
- const sessionDirs = fs18.readdirSync(sessionsDir, { withFileTypes: true });
9663
+ const sessionDirs = fs19.readdirSync(sessionsDir, { withFileTypes: true });
8989
9664
  for (const entry of sessionDirs) {
8990
9665
  if (!entry.isDirectory()) continue;
8991
- const autopilotPath = path18.join(sessionsDir, entry.name, "autopilot-state.json");
8992
- if (!fs18.existsSync(autopilotPath)) continue;
9666
+ const autopilotPath = path19.join(sessionsDir, entry.name, "autopilot-state.json");
9667
+ if (!fs19.existsSync(autopilotPath)) continue;
8993
9668
  try {
8994
- const raw = fs18.readFileSync(autopilotPath, "utf-8");
9669
+ const raw = fs19.readFileSync(autopilotPath, "utf-8");
8995
9670
  const autopilot = JSON.parse(raw);
8996
9671
  if (!autopilot.phases) continue;
8997
9672
  const linkedPhases = autopilot.phases.filter(
@@ -9021,17 +9696,26 @@ function inferStatus(feature, projectPath, allFeatures) {
9021
9696
  if (anyStarted) return "in-progress";
9022
9697
  return null;
9023
9698
  }
9699
+ var STATUS_RANK = {
9700
+ backlog: 0,
9701
+ planned: 1,
9702
+ blocked: 1,
9703
+ // lateral to planned — sync can move to/from blocked freely
9704
+ "in-progress": 2,
9705
+ done: 3
9706
+ };
9707
+ function isRegression(from, to) {
9708
+ return STATUS_RANK[to] < STATUS_RANK[from];
9709
+ }
9024
9710
  function syncRoadmap(options) {
9025
9711
  const { projectPath, roadmap, forceSync } = options;
9026
- const isManuallyEdited = new Date(roadmap.frontmatter.lastManualEdit) > new Date(roadmap.frontmatter.lastSynced);
9027
- const skipOverride = isManuallyEdited && !forceSync;
9028
9712
  const allFeatures = roadmap.milestones.flatMap((m) => m.features);
9029
9713
  const changes = [];
9030
9714
  for (const feature of allFeatures) {
9031
- if (skipOverride) continue;
9032
9715
  const inferred = inferStatus(feature, projectPath, allFeatures);
9033
9716
  if (inferred === null) continue;
9034
9717
  if (inferred === feature.status) continue;
9718
+ if (!forceSync && isRegression(feature.status, inferred)) continue;
9035
9719
  changes.push({
9036
9720
  feature: feature.name,
9037
9721
  from: feature.status,
@@ -9040,48 +9724,60 @@ function syncRoadmap(options) {
9040
9724
  }
9041
9725
  return Ok3(changes);
9042
9726
  }
9727
+ function applySyncChanges(roadmap, changes) {
9728
+ for (const change of changes) {
9729
+ for (const m of roadmap.milestones) {
9730
+ const feature = m.features.find((f) => f.name.toLowerCase() === change.feature.toLowerCase());
9731
+ if (feature) {
9732
+ feature.status = change.to;
9733
+ break;
9734
+ }
9735
+ }
9736
+ }
9737
+ roadmap.frontmatter.lastSynced = (/* @__PURE__ */ new Date()).toISOString();
9738
+ }
9043
9739
 
9044
9740
  // src/interaction/types.ts
9045
- import { z as z6 } from "zod";
9046
- var InteractionTypeSchema = z6.enum(["question", "confirmation", "transition"]);
9047
- var QuestionSchema = z6.object({
9048
- text: z6.string(),
9049
- options: z6.array(z6.string()).optional(),
9050
- default: z6.string().optional()
9741
+ import { z as z7 } from "zod";
9742
+ var InteractionTypeSchema = z7.enum(["question", "confirmation", "transition"]);
9743
+ var QuestionSchema = z7.object({
9744
+ text: z7.string(),
9745
+ options: z7.array(z7.string()).optional(),
9746
+ default: z7.string().optional()
9051
9747
  });
9052
- var ConfirmationSchema = z6.object({
9053
- text: z6.string(),
9054
- context: z6.string()
9748
+ var ConfirmationSchema = z7.object({
9749
+ text: z7.string(),
9750
+ context: z7.string()
9055
9751
  });
9056
- var TransitionSchema = z6.object({
9057
- completedPhase: z6.string(),
9058
- suggestedNext: z6.string(),
9059
- reason: z6.string(),
9060
- artifacts: z6.array(z6.string()),
9061
- requiresConfirmation: z6.boolean(),
9062
- summary: z6.string()
9752
+ var TransitionSchema = z7.object({
9753
+ completedPhase: z7.string(),
9754
+ suggestedNext: z7.string(),
9755
+ reason: z7.string(),
9756
+ artifacts: z7.array(z7.string()),
9757
+ requiresConfirmation: z7.boolean(),
9758
+ summary: z7.string()
9063
9759
  });
9064
- var EmitInteractionInputSchema = z6.object({
9065
- path: z6.string(),
9760
+ var EmitInteractionInputSchema = z7.object({
9761
+ path: z7.string(),
9066
9762
  type: InteractionTypeSchema,
9067
- stream: z6.string().optional(),
9763
+ stream: z7.string().optional(),
9068
9764
  question: QuestionSchema.optional(),
9069
9765
  confirmation: ConfirmationSchema.optional(),
9070
9766
  transition: TransitionSchema.optional()
9071
9767
  });
9072
9768
 
9073
9769
  // src/blueprint/scanner.ts
9074
- import * as fs19 from "fs/promises";
9075
- import * as path19 from "path";
9770
+ import * as fs20 from "fs/promises";
9771
+ import * as path20 from "path";
9076
9772
  var ProjectScanner = class {
9077
9773
  constructor(rootDir) {
9078
9774
  this.rootDir = rootDir;
9079
9775
  }
9080
9776
  async scan() {
9081
- let projectName = path19.basename(this.rootDir);
9777
+ let projectName = path20.basename(this.rootDir);
9082
9778
  try {
9083
- const pkgPath = path19.join(this.rootDir, "package.json");
9084
- const pkgRaw = await fs19.readFile(pkgPath, "utf-8");
9779
+ const pkgPath = path20.join(this.rootDir, "package.json");
9780
+ const pkgRaw = await fs20.readFile(pkgPath, "utf-8");
9085
9781
  const pkg = JSON.parse(pkgRaw);
9086
9782
  if (pkg.name) projectName = pkg.name;
9087
9783
  } catch {
@@ -9122,8 +9818,8 @@ var ProjectScanner = class {
9122
9818
  };
9123
9819
 
9124
9820
  // src/blueprint/generator.ts
9125
- import * as fs20 from "fs/promises";
9126
- import * as path20 from "path";
9821
+ import * as fs21 from "fs/promises";
9822
+ import * as path21 from "path";
9127
9823
  import * as ejs from "ejs";
9128
9824
 
9129
9825
  // src/blueprint/templates.ts
@@ -9207,19 +9903,19 @@ var BlueprintGenerator = class {
9207
9903
  styles: STYLES,
9208
9904
  scripts: SCRIPTS
9209
9905
  });
9210
- await fs20.mkdir(options.outputDir, { recursive: true });
9211
- await fs20.writeFile(path20.join(options.outputDir, "index.html"), html);
9906
+ await fs21.mkdir(options.outputDir, { recursive: true });
9907
+ await fs21.writeFile(path21.join(options.outputDir, "index.html"), html);
9212
9908
  }
9213
9909
  };
9214
9910
 
9215
9911
  // src/update-checker.ts
9216
- import * as fs21 from "fs";
9217
- import * as path21 from "path";
9912
+ import * as fs22 from "fs";
9913
+ import * as path22 from "path";
9218
9914
  import * as os from "os";
9219
9915
  import { spawn } from "child_process";
9220
9916
  function getStatePath() {
9221
9917
  const home = process.env["HOME"] || os.homedir();
9222
- return path21.join(home, ".harness", "update-check.json");
9918
+ return path22.join(home, ".harness", "update-check.json");
9223
9919
  }
9224
9920
  function isUpdateCheckEnabled(configInterval) {
9225
9921
  if (process.env["HARNESS_NO_UPDATE_CHECK"] === "1") return false;
@@ -9232,7 +9928,7 @@ function shouldRunCheck(state, intervalMs) {
9232
9928
  }
9233
9929
  function readCheckState() {
9234
9930
  try {
9235
- const raw = fs21.readFileSync(getStatePath(), "utf-8");
9931
+ const raw = fs22.readFileSync(getStatePath(), "utf-8");
9236
9932
  const parsed = JSON.parse(raw);
9237
9933
  if (typeof parsed === "object" && parsed !== null && "lastCheckTime" in parsed && typeof parsed.lastCheckTime === "number" && "currentVersion" in parsed && typeof parsed.currentVersion === "string") {
9238
9934
  const state = parsed;
@@ -9249,7 +9945,7 @@ function readCheckState() {
9249
9945
  }
9250
9946
  function spawnBackgroundCheck(currentVersion) {
9251
9947
  const statePath = getStatePath();
9252
- const stateDir = path21.dirname(statePath);
9948
+ const stateDir = path22.dirname(statePath);
9253
9949
  const script = `
9254
9950
  const { execSync } = require('child_process');
9255
9951
  const fs = require('fs');
@@ -9302,8 +9998,410 @@ function getUpdateNotification(currentVersion) {
9302
9998
  Run "harness update" to upgrade.`;
9303
9999
  }
9304
10000
 
10001
// src/code-nav/types.ts
// Supported source-file extensions, mapped to their language identifier.
var EXTENSION_MAP = {
  ".ts": "typescript",
  ".tsx": "typescript",
  ".mts": "typescript",
  ".cts": "typescript",
  ".js": "javascript",
  ".jsx": "javascript",
  ".mjs": "javascript",
  ".cjs": "javascript",
  ".py": "python"
};
// Detect a file's language from its extension; null when unsupported.
// A path with no "." (e.g. "Makefile") is unsupported.
function detectLanguage(filePath) {
  const dotIndex = filePath.lastIndexOf(".");
  if (dotIndex === -1) {
    return null;
  }
  const ext = filePath.slice(dotIndex);
  return EXTENSION_MAP[ext] ?? null;
}
10017
+
10018
// src/code-nav/parser.ts
import Parser from "web-tree-sitter";
// Lazily-populated cache of ready parsers, keyed by language id.
var parserCache = /* @__PURE__ */ new Map();
// Whether the tree-sitter WASM runtime has been initialized (one-time, process-wide).
var initialized = false;
// Our language ids -> grammar names as shipped inside the tree-sitter-wasms package.
var GRAMMAR_MAP = {
  typescript: "tree-sitter-typescript",
  javascript: "tree-sitter-javascript",
  python: "tree-sitter-python"
};
// Initialize the tree-sitter WASM runtime exactly once.
async function ensureInit() {
  if (!initialized) {
    await Parser.init();
    initialized = true;
  }
}
// Resolve the on-disk path of a grammar's .wasm file inside the installed
// tree-sitter-wasms package. createRequire gives us Node resolution from an
// ESM bundle; the `import.meta.url ?? __filename` fallback covers a CJS build
// of this file where import.meta is unavailable.
async function resolveWasmPath(grammarName) {
  const { createRequire } = await import("module");
  const require2 = createRequire(import.meta.url ?? __filename);
  const pkgPath = require2.resolve("tree-sitter-wasms/package.json");
  const path23 = await import("path");
  const pkgDir = path23.dirname(pkgPath);
  return path23.join(pkgDir, "out", `${grammarName}.wasm`);
}
// Load the tree-sitter Language object for a supported language id.
async function loadLanguage(lang) {
  const grammarName = GRAMMAR_MAP[lang];
  const wasmPath = await resolveWasmPath(grammarName);
  return Parser.Language.load(wasmPath);
}
// Return a configured parser for `lang`, creating and caching it on first use.
async function getParser(lang) {
  const cached = parserCache.get(lang);
  if (cached) return cached;
  await ensureInit();
  const parser = new Parser();
  const language = await loadLanguage(lang);
  parser.setLanguage(language);
  parserCache.set(lang, parser);
  return parser;
}
// Parse a source file into a tree-sitter tree.
// Err codes: UNSUPPORTED_LANGUAGE (extension not in EXTENSION_MAP),
// FILE_NOT_FOUND (readFileContent failed), PARSE_FAILED (tree-sitter threw).
// Ok carries { tree, language, source, filePath }.
async function parseFile(filePath) {
  const lang = detectLanguage(filePath);
  if (!lang) {
    return Err({
      code: "UNSUPPORTED_LANGUAGE",
      message: `Unsupported file extension: ${filePath}`
    });
  }
  const contentResult = await readFileContent(filePath);
  if (!contentResult.ok) {
    return Err({
      code: "FILE_NOT_FOUND",
      message: `Cannot read file: ${filePath}`
    });
  }
  try {
    const parser = await getParser(lang);
    const tree = parser.parse(contentResult.value);
    return Ok({ tree, language: lang, source: contentResult.value, filePath });
  } catch (e) {
    return Err({
      code: "PARSE_FAILED",
      message: `Tree-sitter parse failed for ${filePath}: ${e.message}`
    });
  }
}
// Drop all cached parsers and force WASM re-initialization on next use
// (primarily a test helper).
function resetParserCache() {
  parserCache.clear();
  initialized = false;
}
10086
+
10087
// src/code-nav/outline.ts
// Per-language map from top-level tree-sitter node types to our symbol kinds.
// "import" entries are recognized but filtered out by extractSymbols.
var TOP_LEVEL_TYPES = {
  typescript: {
    function_declaration: "function",
    class_declaration: "class",
    interface_declaration: "interface",
    type_alias_declaration: "type",
    lexical_declaration: "variable",
    variable_declaration: "variable",
    export_statement: "export",
    import_statement: "import",
    enum_declaration: "type"
  },
  javascript: {
    function_declaration: "function",
    class_declaration: "class",
    lexical_declaration: "variable",
    variable_declaration: "variable",
    export_statement: "export",
    import_statement: "import"
  },
  python: {
    function_definition: "function",
    class_definition: "class",
    assignment: "variable",
    import_statement: "import",
    import_from_statement: "import"
  }
};
// Node types that count as members when walking a class body, per language.
var METHOD_TYPES = {
  typescript: ["method_definition", "public_field_definition"],
  javascript: ["method_definition"],
  python: ["function_definition"]
};
// Node types that can serve as a symbol's name when the grammar exposes no
// explicit "name" field (see findIdentifier).
var IDENTIFIER_TYPES = /* @__PURE__ */ new Set(["identifier", "property_identifier", "type_identifier"]);
10122
function findIdentifier(node) {
  // Prefer the grammar's explicit "name" field; otherwise scan children for
  // the first identifier-like node. Null when neither exists.
  const named = node.childForFieldName("name");
  if (named) return named;
  for (const child of node.children) {
    if (IDENTIFIER_TYPES.has(child.type)) return child;
  }
  return null;
}
10125
function getVariableDeclarationName(node) {
  // A lexical/variable declaration wraps one or more variable_declarator
  // nodes; report the identifier of the first one, or null if none is found.
  for (const child of node.children) {
    if (child.type === "variable_declarator") {
      const id = findIdentifier(child);
      return id?.text ?? null;
    }
  }
  return null;
}
10131
// Name of the declaration wrapped by an `export` statement. Skips the export
// keyword, a `default` modifier, comments, and stray semicolons — the same
// filter used by processExportStatement (previously ";" was not excluded
// here, an inconsistency; a `;` node names to "<anonymous>" either way, so
// behavior is unchanged).
function getExportName(node, source) {
  const decl = node.children.find(
    (c) => c.type !== "export" && c.type !== "default" && c.type !== ";" && c.type !== "comment"
  );
  return decl ? getNodeName(decl, source) : "<anonymous>";
}
10137
function getAssignmentName(node) {
  // Name for a Python-style assignment: prefer the grammar's "left" field,
  // falling back to the first child; "<anonymous>" when no text is available.
  let target = node.childForFieldName("left");
  if (target == null) {
    target = node.children[0];
  }
  if (target != null && target.text != null) {
    return target.text;
  }
  return "<anonymous>";
}
10141
function getNodeName(node, source) {
  // Best-effort display name for a top-level node; "<anonymous>" when it
  // cannot be resolved. Dispatches on node type after the common identifier
  // lookup fails.
  const id = findIdentifier(node);
  if (id) {
    return id.text;
  }
  switch (node.type) {
    case "lexical_declaration":
    case "variable_declaration":
      return getVariableDeclarationName(node) ?? "<anonymous>";
    case "export_statement":
      return getExportName(node, source);
    case "assignment":
      return getAssignmentName(node);
    default:
      return "<anonymous>";
  }
}
10150
function getSignature(node, source) {
  // The node's first source line, trimmed — used as a human-readable signature.
  const row = node.startPosition.row;
  const line = source.split("\n")[row];
  return line === undefined ? "" : line.trim();
}
10155
function extractMethods(classNode, language, source, filePath) {
  // Collect method-like members of a class body as "method" symbols
  // (1-based line numbers). Returns [] when no body can be located.
  const memberTypes = METHOD_TYPES[language] ?? [];
  let body = classNode.childForFieldName("body");
  if (body == null) {
    body = classNode.children.find((c) => c.type === "class_body" || c.type === "block");
  }
  if (!body) return [];
  const methods = [];
  for (const member of body.children) {
    if (!memberTypes.includes(member.type)) continue;
    methods.push({
      name: getNodeName(member, source),
      kind: "method",
      file: filePath,
      line: member.startPosition.row + 1,
      endLine: member.endPosition.row + 1,
      signature: getSignature(member, source)
    });
  }
  return methods;
}
10168
function nodeToSymbol(node, kind, source, filePath) {
  // Convert a tree-sitter node into a flat symbol record.
  // tree-sitter rows are 0-based; symbol lines are 1-based.
  const firstLine = node.startPosition.row + 1;
  const lastLine = node.endPosition.row + 1;
  return {
    name: getNodeName(node, source),
    kind,
    file: filePath,
    line: firstLine,
    endLine: lastLine,
    signature: getSignature(node, source)
  };
}
10178
function processExportStatement(child, topLevelTypes, lang, source, filePath) {
  // An export statement is reported as the symbol it wraps when that
  // declaration maps to a known kind; otherwise as a generic "export" symbol.
  const declaration = child.children.find(
    (c) => c.type !== "export" && c.type !== "default" && c.type !== ";" && c.type !== "comment"
  );
  if (!declaration) {
    return nodeToSymbol(child, "export", source, filePath);
  }
  const kind = topLevelTypes[declaration.type];
  if (!kind) {
    return nodeToSymbol(child, "export", source, filePath);
  }
  // Span/signature come from the whole export statement, but the name (and,
  // for classes, the method children) come from the inner declaration.
  const sym = nodeToSymbol(child, kind, source, filePath);
  sym.name = getNodeName(declaration, source);
  if (kind === "class") {
    sym.children = extractMethods(declaration, lang, source, filePath);
  }
  return sym;
}
10193
function extractSymbols(rootNode, lang, source, filePath) {
  // Walk the file's top-level nodes and build the symbol list. Import
  // statements are deliberately omitted; classes carry their methods as
  // children.
  const topLevelTypes = TOP_LEVEL_TYPES[lang] ?? {};
  const symbols = [];
  for (const child of rootNode.children) {
    if (child.type === "export_statement") {
      symbols.push(processExportStatement(child, topLevelTypes, lang, source, filePath));
      continue;
    }
    const kind = topLevelTypes[child.type];
    if (!kind) continue;
    if (kind === "import") continue;
    const sym = nodeToSymbol(child, kind, source, filePath);
    if (kind === "class") {
      sym.children = extractMethods(child, lang, source, filePath);
    }
    symbols.push(sym);
  }
  return symbols;
}
10211
function buildFailedResult(filePath, lang) {
  // Placeholder outline used whenever a file cannot be parsed.
  return {
    file: filePath,
    language: lang,
    totalLines: 0,
    symbols: [],
    error: "[parse-failed]"
  };
}
10214
async function getOutline(filePath) {
  // Produce a symbol outline for one file. Any failure (unsupported
  // extension, unreadable file, parse error) yields a [parse-failed]
  // placeholder rather than throwing.
  const lang = detectLanguage(filePath);
  if (!lang) {
    return buildFailedResult(filePath, "unknown");
  }
  const parsed = await parseFile(filePath);
  if (!parsed.ok) {
    return buildFailedResult(filePath, lang);
  }
  const { tree, source } = parsed.value;
  return {
    file: filePath,
    language: lang,
    totalLines: source.split("\n").length,
    symbols: extractSymbols(tree.rootNode, lang, source, filePath)
  };
}
10224
// Render an outline as a box-drawing tree: a header line with the file path
// and line count, one branch per top-level symbol, and nested branches for
// class members.
function formatOutline(outline) {
  if (outline.error) {
    // Parse-failed placeholder: just echo the file path and the error tag.
    return `${outline.file} ${outline.error}`;
  }
  const lines = [`${outline.file} (${outline.totalLines} lines)`];
  const last = outline.symbols.length - 1;
  outline.symbols.forEach((sym, i) => {
    // "└──" for the final sibling, "├──" otherwise.
    const prefix = i === last ? "\u2514\u2500\u2500" : "\u251C\u2500\u2500";
    lines.push(`${prefix} ${sym.signature} :${sym.line}`);
    if (sym.children) {
      const childLast = sym.children.length - 1;
      sym.children.forEach((child, j) => {
        // Continue the parent's vertical rule ("│") unless the parent was the
        // last sibling. NOTE(review): connector widths here look narrow for
        // aligning under a 3-char "└── " prefix — confirm intended alignment.
        const childConnector = i === last ? " " : "\u2502 ";
        const childPrefix = j === childLast ? "\u2514\u2500\u2500" : "\u251C\u2500\u2500";
        lines.push(`${childConnector}${childPrefix} ${child.signature} :${child.line}`);
      });
    }
  });
  return lines.join("\n");
}
10244
+
10245
// src/code-nav/search.ts
function buildGlob(directory, fileGlob) {
  // Normalize Windows separators, then either honor the caller's glob or
  // default to every extension the code-nav parsers support.
  const dir = directory.replaceAll("\\", "/");
  if (fileGlob) {
    return `${dir}/**/${fileGlob}`;
  }
  const extNames = [];
  for (const ext of Object.keys(EXTENSION_MAP)) {
    extNames.push(ext.slice(1));
  }
  return `${dir}/**/*.{${extNames.join(",")}}`;
}
10254
function matchesQuery(name, query) {
  // Case-insensitive substring match of `query` within `name`.
  const lowerName = name.toLowerCase();
  const lowerQuery = query.toLowerCase();
  return lowerName.includes(lowerQuery);
}
10257
+ function flattenSymbols(symbols) {
10258
+ const flat = [];
10259
+ for (const sym of symbols) {
10260
+ flat.push(sym);
10261
+ if (sym.children) {
10262
+ flat.push(...sym.children);
10263
+ }
10264
+ }
10265
+ return flat;
10266
+ }
10267
async function searchSymbols(query, directory, fileGlob) {
  // Scan `directory` for supported source files and return every symbol whose
  // name contains `query` (case-insensitive). Files that are unsupported or
  // fail to parse are reported in `skipped` instead of aborting the search.
  const pattern = buildGlob(directory, fileGlob);
  let files = [];
  try {
    files = await findFiles(pattern, directory);
  } catch {
    // Glob failure is treated the same as "no files found".
    files = [];
  }
  const matches = [];
  const skipped = [];
  for (const file of files) {
    if (!detectLanguage(file)) {
      skipped.push(file);
      continue;
    }
    const outline = await getOutline(file);
    if (outline.error) {
      skipped.push(file);
      continue;
    }
    for (const sym of flattenSymbols(outline.symbols)) {
      if (matchesQuery(sym.name, query)) {
        matches.push({ symbol: sym, context: sym.signature });
      }
    }
  }
  return { query, matches, skipped };
}
10300
+
10301
+ // src/code-nav/unfold.ts
10302
+ function findSymbolInList(symbols, name) {
10303
+ for (const sym of symbols) {
10304
+ if (sym.name === name) return sym;
10305
+ if (sym.children) {
10306
+ const found = findSymbolInList(sym.children, name);
10307
+ if (found) return found;
10308
+ }
10309
+ }
10310
+ return null;
10311
+ }
10312
function extractLines(source, startLine, endLine) {
  // Return the 1-based inclusive line range [startLine, endLine], clamped to
  // the bounds of `source`.
  const allLines = source.split("\n");
  let from = startLine - 1;
  if (from < 0) from = 0;
  let to = endLine;
  if (to > allLines.length) to = allLines.length;
  return allLines.slice(from, to).join("\n");
}
10318
function buildFallbackResult(filePath, symbolName, content, language) {
  // Degraded unfold result: serve raw content (or nothing) when symbol-level
  // extraction is impossible. Lines are 1-based; 0/0 signals "no content".
  const hasContent = Boolean(content);
  const lineCount = hasContent ? content.split("\n").length : 0;
  return {
    file: filePath,
    symbolName,
    startLine: hasContent ? 1 : 0,
    endLine: lineCount,
    content,
    language,
    fallback: true,
    warning: "[fallback: raw content]"
  };
}
10331
async function readContentSafe(filePath) {
  // Best-effort read: an unreadable file collapses to the empty string.
  const result = await readFileContent(filePath);
  if (result.ok) {
    return result.value;
  }
  return "";
}
10335
// Extract the source text of one named symbol from a file.
// Falls back to raw file content (flagged via `fallback`/`warning`) when the
// language is unknown, the outline fails, or the symbol cannot be found.
async function unfoldSymbol(filePath, symbolName) {
  const lang = detectLanguage(filePath);
  if (!lang) {
    // Unsupported extension: no outline possible, return the whole file.
    const content2 = await readContentSafe(filePath);
    return buildFallbackResult(filePath, symbolName, content2, "unknown");
  }
  const outline = await getOutline(filePath);
  if (outline.error) {
    // Outline failed (unreadable or unparseable): whole-file fallback.
    const content2 = await readContentSafe(filePath);
    return buildFallbackResult(filePath, symbolName, content2, lang);
  }
  const symbol = findSymbolInList(outline.symbols, symbolName);
  if (!symbol) {
    // Symbol absent from the outline: whole-file fallback.
    const content2 = await readContentSafe(filePath);
    return buildFallbackResult(filePath, symbolName, content2, lang);
  }
  // NOTE(review): this re-parses the file even though getOutline() above just
  // parsed it successfully — the second parse exists only to recover the
  // source text. Consider having getOutline carry the source instead.
  const parseResult = await parseFile(filePath);
  if (!parseResult.ok) {
    // Parse succeeded moments ago but fails now (e.g. the file changed on
    // disk): fall back to raw lines at the symbol's recorded range, keeping
    // the symbol's start/end lines in the result.
    const content2 = await readContentSafe(filePath);
    return {
      ...buildFallbackResult(
        filePath,
        symbolName,
        extractLines(content2, symbol.line, symbol.endLine),
        lang
      ),
      startLine: symbol.line,
      endLine: symbol.endLine
    };
  }
  const content = extractLines(parseResult.value.source, symbol.line, symbol.endLine);
  return {
    file: filePath,
    symbolName,
    startLine: symbol.line,
    endLine: symbol.endLine,
    content,
    language: lang,
    fallback: false
  };
}
10376
async function unfoldRange(filePath, startLine, endLine) {
  // Read an explicit 1-based line range from a file. An unreadable file
  // yields an empty fallback result instead of throwing; the end line is
  // clamped to the file's length.
  const language = detectLanguage(filePath) ?? "unknown";
  const read = await readFileContent(filePath);
  if (!read.ok) {
    return {
      file: filePath,
      startLine: 0,
      endLine: 0,
      content: "",
      language,
      fallback: true,
      warning: "[fallback: raw content]"
    };
  }
  const lineTotal = read.value.split("\n").length;
  const cappedEnd = endLine < lineTotal ? endLine : lineTotal;
  return {
    file: filePath,
    startLine,
    endLine: cappedEnd,
    content: extractLines(read.value, startLine, cappedEnd),
    language,
    fallback: false
  };
}
10402
+
9305
10403
  // src/index.ts
9306
- var VERSION = "0.14.0";
10404
// Current package version, exported as part of the public API.
var VERSION = "0.15.0";
9307
10405
  export {
9308
10406
  AGENT_DESCRIPTORS,
9309
10407
  ARCHITECTURE_DESCRIPTOR,
@@ -9337,6 +10435,7 @@ export {
9337
10435
  DEFAULT_STATE,
9338
10436
  DEFAULT_STREAM_INDEX,
9339
10437
  DepDepthCollector,
10438
+ EXTENSION_MAP,
9340
10439
  EmitInteractionInputSchema,
9341
10440
  EntropyAnalyzer,
9342
10441
  EntropyConfigSchema,
@@ -9371,6 +10470,7 @@ export {
9371
10470
  SharableForbiddenImportSchema,
9372
10471
  SharableLayerSchema,
9373
10472
  SharableSecurityRulesSchema,
10473
+ SkillEventSchema,
9374
10474
  StreamIndexSchema,
9375
10475
  StreamInfoSchema,
9376
10476
  ThresholdConfigSchema,
@@ -9379,6 +10479,7 @@ export {
9379
10479
  VERSION,
9380
10480
  ViolationSchema,
9381
10481
  addProvenance,
10482
+ agentConfigRules,
9382
10483
  analyzeDiff,
9383
10484
  analyzeLearningPatterns,
9384
10485
  appendFailure,
@@ -9386,6 +10487,7 @@ export {
9386
10487
  appendSessionEntry,
9387
10488
  applyFixes,
9388
10489
  applyHotspotDowngrade,
10490
+ applySyncChanges,
9389
10491
  archMatchers,
9390
10492
  archModule,
9391
10493
  architecture,
@@ -9400,12 +10502,14 @@ export {
9400
10502
  checkEligibility,
9401
10503
  checkEvidenceCoverage,
9402
10504
  classifyFinding,
10505
+ clearEventHashCache,
9403
10506
  clearFailuresCache,
9404
10507
  clearLearningsCache,
9405
10508
  configureFeedback,
9406
10509
  constraintRuleId,
9407
10510
  contextBudget,
9408
10511
  contextFilter,
10512
+ countLearningEntries,
9409
10513
  createBoundaryValidator,
9410
10514
  createCommentedCodeFixes,
9411
10515
  createError,
@@ -9429,27 +10533,34 @@ export {
9429
10533
  detectCouplingViolations,
9430
10534
  detectDeadCode,
9431
10535
  detectDocDrift,
10536
+ detectLanguage,
9432
10537
  detectPatternViolations,
9433
10538
  detectSizeBudgetViolations,
9434
10539
  detectStack,
9435
10540
  detectStaleConstraints,
9436
10541
  determineAssessment,
9437
10542
  diff,
10543
+ emitEvent,
9438
10544
  executeWorkflow,
9439
10545
  expressRules,
9440
10546
  extractBundle,
10547
+ extractIndexEntry,
9441
10548
  extractMarkdownLinks,
9442
10549
  extractSections,
9443
10550
  fanOutReview,
10551
+ formatEventTimeline,
9444
10552
  formatFindingBlock,
9445
10553
  formatGitHubComment,
9446
10554
  formatGitHubSummary,
10555
+ formatOutline,
9447
10556
  formatTerminalOutput,
9448
10557
  generateAgentsMap,
9449
10558
  generateSuggestions,
9450
10559
  getActionEmitter,
9451
10560
  getExitCode,
9452
10561
  getFeedbackConfig,
10562
+ getOutline,
10563
+ getParser,
9453
10564
  getPhaseCategories,
9454
10565
  getStreamForBranch,
9455
10566
  getUpdateNotification,
@@ -9460,24 +10571,30 @@ export {
9460
10571
  listActiveSessions,
9461
10572
  listStreams,
9462
10573
  loadBudgetedLearnings,
10574
+ loadEvents,
9463
10575
  loadFailures,
9464
10576
  loadHandoff,
10577
+ loadIndexEntries,
9465
10578
  loadRelevantLearnings,
9466
10579
  loadSessionSummary,
9467
10580
  loadState,
9468
10581
  loadStreamIndex,
9469
10582
  logAgentAction,
10583
+ mcpRules,
9470
10584
  migrateToStreams,
9471
10585
  networkRules,
9472
10586
  nodeRules,
9473
10587
  parseDateFromEntry,
9474
10588
  parseDiff,
10589
+ parseFile,
10590
+ parseFrontmatter,
9475
10591
  parseManifest,
9476
10592
  parseRoadmap,
9477
10593
  parseSecurityConfig,
9478
10594
  parseSize,
9479
10595
  pathTraversalRules,
9480
10596
  previewFix,
10597
+ promoteSessionLearnings,
9481
10598
  pruneLearnings,
9482
10599
  reactRules,
9483
10600
  readCheckState,
@@ -9489,6 +10606,7 @@ export {
9489
10606
  requestMultiplePeerReviews,
9490
10607
  requestPeerReview,
9491
10608
  resetFeedbackConfig,
10609
+ resetParserCache,
9492
10610
  resolveFileToLayer,
9493
10611
  resolveModelTier,
9494
10612
  resolveRuleSeverity,
@@ -9510,6 +10628,7 @@ export {
9510
10628
  saveState,
9511
10629
  saveStreamIndex,
9512
10630
  scopeContext,
10631
+ searchSymbols,
9513
10632
  secretRules,
9514
10633
  serializeRoadmap,
9515
10634
  setActiveStream,
@@ -9520,6 +10639,8 @@ export {
9520
10639
  tagUncitedFindings,
9521
10640
  touchStream,
9522
10641
  trackAction,
10642
+ unfoldRange,
10643
+ unfoldSymbol,
9523
10644
  updateSessionEntryStatus,
9524
10645
  updateSessionIndex,
9525
10646
  validateAgentsMap,